pangxiell created SINGA-260:
-------------------------------

             Summary: Caffe layer feature map vs. SINGA layer feature map
                 Key: SINGA-260
                 URL: https://issues.apache.org/jira/browse/SINGA-260
             Project: Singa
          Issue Type: Bug
         Environment: Ubuntu 14.04, SINGA 1.0, CUDA 8.0
            Reporter: pangxiell


Hi SINGA developers, please help me! Thank you!

I want to convert a Caffe-trained model to SINGA.
After converting vgg16_deploy.prototxt and vgg16_trained.caffemodel to SINGA,
I compared the per-layer feature maps between Caffe and SINGA. I found
something strange: with the same input, the same weights, and the same bias,
the two frameworks produce different results!
All of my computation is done with cuDNN 5.1 and CUDA 8.0.
Below are the conv1_1 feature maps, the layer parameters, and the Caffe
net.prototxt. I am new to CNNs; any help would be much appreciated. Thank you!
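
For reference, this is roughly how I dump the Caffe-side feature map (a
minimal sketch of my test script; input.npy is a placeholder for the actual
test input I feed to both frameworks):

import numpy as np
import caffe

caffe.set_mode_gpu()
net = caffe.Net('vgg16_deploy.prototxt', 'vgg16_trained.caffemodel', caffe.TEST)

x = np.load('input.npy')           # shape (10, 3, 224, 224), matching input_dim below
net.blobs['data'].data[...] = x
net.forward()

fmap = net.blobs['conv1_1'].data   # the conv1_1 blob after the forward pass
np.save('caffe_fmap.npy', fmap)
print(fmap)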


conv1_1 output feature map in Caffe:
[[[[ 0.          1.32925057  1.29131138 ...,  1.3344599   1.3344599
     1.65958416]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   ..., 
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]]

  [[ 0.          0.          0.         ...,  0.          0.          2.18781996]
   [ 0.          1.65851605  1.64387    ...,  1.62676132  1.62676132
     3.40298295]
   [ 0.          1.71282542  1.80268919 ...,  1.62676132  1.62676132
     3.40298295]
   ..., 
   [ 0.          1.53555477  1.52216256 ...,  1.62676132  1.62676132
     3.40298295]
   [ 0.          1.51878488  1.54439628 ...,  1.62676132  1.62676132
     3.40298295]
   [ 0.45330271  2.44646335  2.46565628 ...,  2.5640552   2.5640552
     3.23327875]]

  [[ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   ..., 
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]]

  ..., 
  [[ 0.          1.15822101  1.12836289 ...,  1.15823257  1.15823257
     2.62147641]
   [ 0.          0.32776707  0.3402763  ...,  0.32900044  0.32900044
     4.58077908]
   [ 0.          0.37061766  0.20697321 ...,  0.32900044  0.32900044
     4.58077908]
   ..., 
   [ 0.          0.29182461  0.26635617 ...,  0.32900044  0.32900044
     4.58077908]
   [ 0.          0.24839559  0.28039235 ...,  0.32900044  0.32900044
     4.58077908]
   [ 0.          0.          0.         ...,  0.          0.          0.58074397]]

  [[ 4.1525259   0.98238343  0.97233886 ...,  0.9321624   0.9321624
     0.80502194]
   [ 3.1123457   0.          0.         ...,  0.          0.          0.        ]
   [ 3.10493231  0.          0.         ...,  0.          0.          0.        ]
   ..., 
   [ 2.91378093  0.          0.         ...,  0.          0.          0.        ]
   [ 2.90884662  0.          0.         ...,  0.          0.          0.        ]
   [ 1.78133321  0.          0.         ...,  0.          0.          0.        ]]

  [[ 4.23725843  3.92370176  3.87382889 ...,  3.92354441  3.92354441
     1.62559974]
   [ 5.06026077  5.14032888  5.14915466 ...,  5.25364971  5.25364971
     3.15589523]
   [ 4.98454475  5.13233662  5.07306051 ...,  5.25364971  5.25364971
     3.15589523]
   ..., 
   [ 4.92585659  5.04519129  5.08135509 ...,  5.25364971  5.25364971
     3.15589523]
   [ 4.95370245  5.03885031  5.08303356 ...,  5.25364971  5.25364971
     3.15589523]
   [ 3.74774575  4.78569221  4.83339024 ...,  5.0071063   5.0071063
     1.9427563 ]]]]
conv1_1 weights in Caffe:
[[[[ 0.00850634  0.00754428  0.0104667 ]
   [ 0.00282496 -0.0027129   0.00276441]
   [-0.00195265  0.00433889 -0.00262078]]

  [[ 0.0006124  -0.00466853  0.00038344]
   [-0.00467937  0.00585917  0.00812106]
   [ 0.00750268 -0.00080539  0.00109091]]

  [[ 0.02275514 -0.00033002 -0.00400474]
   [-0.00717515  0.01546651 -0.00067181]
   [-0.01527147 -0.01406416 -0.0043859 ]]]


 [[[ 0.01347855 -0.00352572 -0.00838911]
   [ 0.00233354  0.005205    0.00387072]
   [-0.00714711  0.00596602  0.01117658]]

  [[-0.01879554  0.004475   -0.00248052]
   [-0.00654012 -0.00028569  0.00818573]
   [-0.01255068  0.01555492 -0.01060243]]

  [[ 0.00812689 -0.01313079 -0.00390115]
   [-0.01521709 -0.00033092  0.00772838]
   [ 0.00827447 -0.01116868  0.00906964]]]


 [[[-0.01065893  0.01276438  0.00548413]
   [ 0.00571688  0.01419119 -0.00096069]
   [ 0.00900401  0.00684223  0.00768703]]

  [[-0.00515114  0.00051591 -0.00681952]
   [ 0.00158043  0.01539363  0.00268879]
   [-0.00661799  0.00388905 -0.01511758]]

  [[ 0.00058433  0.00270303 -0.01194096]
   [ 0.01680594 -0.00635558 -0.00678913]
   [-0.00523596  0.00598517 -0.0058665 ]]]


 ..., 
 [[[-0.00287419 -0.00421872  0.01433577]
   [ 0.00379841  0.01539007  0.01394418]
   [-0.00034302 -0.00515043  0.01400452]]

  [[ 0.00740621 -0.00147624  0.00551567]
   [-0.01737775  0.00381153 -0.00750302]
   [-0.02425117 -0.00245259 -0.01052278]]

  [[ 0.00449902 -0.01933564  0.0067264 ]
   [ 0.00857356 -0.00080291 -0.00353165]
   [-0.00313204 -0.00219617  0.00922761]]]


 [[[-0.00592723  0.00357399 -0.00755919]
   [-0.00429552 -0.00372033  0.00735837]
   [ 0.00408205 -0.00690898 -0.00823291]]

  [[ 0.00788845  0.02441542 -0.00630782]
   [ 0.01927572 -0.00438485 -0.00568748]
   [ 0.00435551 -0.006596    0.00158271]]

  [[ 0.00721608 -0.00344833  0.00014568]
   [ 0.00832353 -0.0067124  -0.010458  ]
   [-0.00227656 -0.00796929  0.01343615]]]


 [[[-0.00255021  0.02384895 -0.00994168]
   [-0.00690358 -0.0033091  -0.00871819]
   [-0.00353378  0.00959968 -0.00176385]]

  [[-0.01070219 -0.01495273 -0.00126908]
   [-0.00315238  0.00230987 -0.00847946]
   [ 0.00717464  0.00052554  0.00878843]]

  [[ 0.00727798 -0.0137497   0.0097814 ]
   [ 0.00240273  0.0024867  -0.01207745]
   [ 0.0045142  -0.02454388  0.00163318]]]]
conv1_1 bias in Caffe:
 [ 0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.
  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.
  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.
  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.]



conv1_1 output feature map in SINGA:
[[[[-1.15990531  1.32925057  1.29131138 ...,  1.3344599   1.3344599
     1.65958416]
   [-1.92470229 -3.24402356 -3.19016767 ..., -3.14230347 -3.14230347
    -2.3147471 ]
   [-2.00432348 -3.56044745 -3.69492149 ..., -3.14230347 -3.14230347
    -2.3147471 ]
   ..., 
   [-1.73503172 -2.94724107 -3.00977921 ..., -3.14230347 -3.14230347
    -2.3147471 ]
   [-1.73839962 -2.96330857 -2.96887565 ..., -3.14230347 -3.14230347
    -2.3147471 ]
   [-3.8452611  -6.39361048 -6.45467329 ..., -6.70470476 -6.70470476
    -5.17954683]]

  [[-4.45994806 -1.13598835 -1.14740443 ..., -1.13741207 -1.13741207
     2.18781996]
   [-1.40371311  1.65851605  1.64387    ...,  1.62676132  1.62676132
     3.40298295]
   [-1.35754728  1.71282542  1.80268919 ...,  1.62676132  1.62676132
     3.40298295]
   ..., 
   [-1.26869226  1.53555477  1.52216256 ...,  1.62676132  1.62676132
     3.40298295]
   [-1.30324793  1.51878488  1.54439628 ...,  1.62676132  1.62676132
     3.40298295]
   [ 0.45330271  2.44646335  2.46565628 ...,  2.5640552   2.5640552
     3.23327875]]

  [[-1.58399725 -3.95110941 -3.98022842 ..., -3.97811508 -3.97811508
    -6.28557253]
   [-1.47556436 -2.40914106 -2.4150188  ..., -2.37049937 -2.37049937
    -6.42171335]
   [-1.64208531 -2.60111237 -2.59987879 ..., -2.37049937 -2.37049937
    -6.42171335]
   ..., 
   [-1.31520605 -2.15799737 -2.18744612 ..., -2.37049937 -2.37049937
    -6.42171335]
   [-1.32342124 -2.1652441  -2.22677469 ..., -2.37049937 -2.37049937
    -6.42171335]
   [-1.14510441 -2.52267647 -2.55242896 ..., -2.69865441 -2.69865441
    -5.12770367]]

  ..., 
  [[-2.15458727  1.15822101  1.12836289 ...,  1.15823257  1.15823257
     2.62147641]
   [-1.88057959  0.32776707  0.3402763  ...,  0.32900044  0.32900044
     4.58077908]
   [-1.8912673   0.37061766  0.20697321 ...,  0.32900044  0.32900044
     4.58077908]
   ..., 
   [-1.80236626  0.29182461  0.26635617 ...,  0.32900044  0.32900044
     4.58077908]
   [-1.77869821  0.24839559  0.28039235 ...,  0.32900044  0.32900044
     4.58077908]
   [-1.43230343 -2.1616652  -2.18854642 ..., -2.27060723 -2.27060723
     0.58074397]]

  [[ 4.1525259   0.98238343  0.97233886 ...,  0.9321624   0.9321624
     0.80502194]
   [ 3.1123457  -1.32611859 -1.39405644 ..., -1.36906409 -1.36906409
    -2.84570265]
   [ 3.10493231 -1.32919359 -1.42399073 ..., -1.36906409 -1.36906409
    -2.84570265]
   ..., 
   [ 2.91378093 -1.27367365 -1.24672067 ..., -1.36906409 -1.36906409
    -2.84570265]
   [ 2.90884662 -1.2343148  -1.26084697 ..., -1.36906409 -1.36906409
    -2.84570265]
   [ 1.78133321 -1.90749574 -1.9271915  ..., -2.06096601 -2.06096601
    -4.66862774]]

  [[ 4.23725843  3.92370176  3.87382889 ...,  3.92354441  3.92354441
     1.62559974]
   [ 5.06026077  5.14032888  5.14915466 ...,  5.25364971  5.25364971
     3.15589523]
   [ 4.98454475  5.13233662  5.07306051 ...,  5.25364971  5.25364971
     3.15589523]
   ..., 
   [ 4.92585659  5.04519129  5.08135509 ...,  5.25364971  5.25364971
     3.15589523]
   [ 4.95370245  5.03885031  5.08303356 ...,  5.25364971  5.25364971
     3.15589523]
   [ 3.74774575  4.78569221  4.83339024 ...,  5.0071063   5.0071063
     1.9427563 ]]]]
conv1_1 weights in SINGA:
[[ 0.00850634  0.00754428  0.0104667  ..., -0.01527147 -0.01406416 -0.0043859 ]
 [ 0.01347855 -0.00352572 -0.00838911 ...,  0.00827447 -0.01116868  0.00906964]
 [-0.01065893  0.01276438  0.00548413 ..., -0.00523596  0.00598517 -0.0058665 ]
 ..., 
 [-0.00287419 -0.00421872  0.01433577 ..., -0.00313204 -0.00219617  0.00922761]
 [-0.00592723  0.00357399 -0.00755919 ..., -0.00227656 -0.00796929  0.01343615]
 [-0.00255021  0.02384895 -0.00994168 ...,  0.0045142  -0.02454388  0.00163318]]
conv1_1 bias in SINGA:
 [ 0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.
  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.
  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.
  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.]
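
Comparing the two dumps, I notice that wherever the Caffe value is positive it
matches the SINGA value exactly, and wherever the SINGA value is negative the
Caffe value is 0. In other words, the Caffe array looks like ReLU applied to
the SINGA array. A quick check with numpy (caffe_fmap.npy and singa_fmap.npy
are the dumps from the script above and its SINGA counterpart):

import numpy as np

caffe_fmap = np.load('caffe_fmap.npy')   # conv1_1 dump from Caffe
singa_fmap = np.load('singa_fmap.npy')   # conv1_1 dump from SINGA

# True would mean Caffe's blob holds the post-ReLU output
# while SINGA's holds the raw convolution output.
print(np.allclose(caffe_fmap, np.maximum(singa_fmap, 0.0)))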


Caffe net.prototxt:
name: "VGG_ILSVRC_16_layers"
input: "data"
input_dim: 10
input_dim: 3
input_dim: 224
input_dim: 224
layers {
  bottom: "data"
  top: "conv1_1"
  name: "conv1_1"
  type: CONVOLUTION
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv1_1"
  top: "conv1_1"
  name: "relu1_1"
  type: RELU
}
layers {
  bottom: "conv1_1"
  top: "conv1_2"
  name: "conv1_2"
  type: CONVOLUTION
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv1_2"
  top: "conv1_2"
  name: "relu1_2"
  type: RELU
}
layers {
  bottom: "conv1_2"
  top: "pool1"
  name: "pool1"
  type: POOLING
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layers {
  bottom: "pool1"
  top: "conv2_1"
  name: "conv2_1"
  type: CONVOLUTION
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv2_1"
  top: "conv2_1"
  name: "relu2_1"
  type: RELU
}
layers {
  bottom: "conv2_1"
  top: "conv2_2"
  name: "conv2_2"
  type: CONVOLUTION
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv2_2"
  top: "conv2_2"
  name: "relu2_2"
  type: RELU
}
layers {
  bottom: "conv2_2"
  top: "pool2"
  name: "pool2"
  type: POOLING
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layers {
  bottom: "pool2"
  top: "conv3_1"
  name: "conv3_1"
  type: CONVOLUTION
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv3_1"
  top: "conv3_1"
  name: "relu3_1"
  type: RELU
}
layers {
  bottom: "conv3_1"
  top: "conv3_2"
  name: "conv3_2"
  type: CONVOLUTION
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv3_2"
  top: "conv3_2"
  name: "relu3_2"
  type: RELU
}
layers {
  bottom: "conv3_2"
  top: "conv3_3"
  name: "conv3_3"
  type: CONVOLUTION
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv3_3"
  top: "conv3_3"
  name: "relu3_3"
  type: RELU
}
layers {
  bottom: "conv3_3"
  top: "pool3"
  name: "pool3"
  type: POOLING
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layers {
  bottom: "pool3"
  top: "conv4_1"
  name: "conv4_1"
  type: CONVOLUTION
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv4_1"
  top: "conv4_1"
  name: "relu4_1"
  type: RELU
}
layers {
  bottom: "conv4_1"
  top: "conv4_2"
  name: "conv4_2"
  type: CONVOLUTION
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv4_2"
  top: "conv4_2"
  name: "relu4_2"
  type: RELU
}
layers {
  bottom: "conv4_2"
  top: "conv4_3"
  name: "conv4_3"
  type: CONVOLUTION
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv4_3"
  top: "conv4_3"
  name: "relu4_3"
  type: RELU
}
layers {
  bottom: "conv4_3"
  top: "pool4"
  name: "pool4"
  type: POOLING
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layers {
  bottom: "pool4"
  top: "conv5_1"
  name: "conv5_1"
  type: CONVOLUTION
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv5_1"
  top: "conv5_1"
  name: "relu5_1"
  type: RELU
}
layers {
  bottom: "conv5_1"
  top: "conv5_2"
  name: "conv5_2"
  type: CONVOLUTION
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv5_2"
  top: "conv5_2"
  name: "relu5_2"
  type: RELU
}
layers {
  bottom: "conv5_2"
  top: "conv5_3"
  name: "conv5_3"
  type: CONVOLUTION
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv5_3"
  top: "conv5_3"
  name: "relu5_3"
  type: RELU
}
layers {
  bottom: "conv5_3"
  top: "pool5"
  name: "pool5"
  type: POOLING
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layers {
  name: "flatdata"
  type: FLATTEN
  bottom: "pool5"
  top: "flatdata"
}
layers {
  bottom: "flatdata"
  top: "fc6"
  name: "fc6"
  type: INNER_PRODUCT
  inner_product_param {
    num_output: 4096
  }
}
layers {
  bottom: "fc6"
  top: "fc6"
  name: "relu6"
  type: RELU
}
layers {
  bottom: "fc6"
  top: "fc6"
  name: "drop6"
  type: DROPOUT
  dropout_param {
    dropout_ratio: 0.5
  }
}
layers {
  bottom: "fc6"
  top: "fc7"
  name: "fc7"
  type: INNER_PRODUCT
  inner_product_param {
    num_output: 4096
  }
}
layers {
  bottom: "fc7"
  top: "fc7"
  name: "relu7"
  type: RELU
}
layers {
  bottom: "fc7"
  top: "fc7"
  name: "drop7"
  type: DROPOUT
  dropout_param {
    dropout_ratio: 0.5
  }
}
layers {
  bottom: "fc7"
  top: "fc8"
  name: "fc8"
  type: INNER_PRODUCT
  inner_product_param {
    num_output: 1000
  }
}
layers {
  bottom: "fc8"
  top: "prob"
  name: "prob"
  type: SOFTMAX
}
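
Note that relu1_1 in the prototxt above is an in-place layer (bottom and top
are both "conv1_1"), so reading the "conv1_1" blob back from Caffe after
net.forward() may already include the ReLU, while SINGA may give me the raw
convolution output. Could that be why the two feature maps differ exactly
where the SINGA values are negative, or am I comparing the layer outputs in
the wrong way?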


