aGiant opened a new issue #17814: 
mxnet.gluon.data.vision.transforms.Normalize(mean=0.0, std=1.0) tuple issue 
within hybrid_forward()
URL: https://github.com/apache/incubator-mxnet/issues/17814
 
 
   According to the official MXNet docs
   (https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/training/normalization/index.html
   and
   https://mxnet.apache.org/api/python/docs/api/gluon/data/vision/transforms/index.html#mxnet.gluon.data.vision.transforms.Normalize),
   Normalize() works very well on its own, but it fails inside hybrid_forward().
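   For reference, this is roughly the standalone usage shown in those docs (the
   channel count, shapes and values below are just illustrative):
    ```
    import mxnet as mx
    from mxnet.gluon.data.vision import transforms

    # Normalize expects float scalars or tuples of floats for mean/std and an
    # image-shaped (C, H, W) or (N, C, H, W) float input.
    normalizer = transforms.Normalize(mean=(0.485, 0.456, 0.406),
                                      std=(0.229, 0.224, 0.225))
    img = mx.nd.random.uniform(shape=(3, 224, 224))
    out = normalizer(img)   # works fine when called directly on an NDArray
    print(out.shape)        # (3, 224, 224)
    ```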
   Code:
   ```
    import mxnet as mx
    from mxnet import gluon
    from mxnet.gluon import nn
    from mxnet.gluon.data.vision.transforms import Normalize

    model_ctx = mx.cpu()  # defined elsewhere in the notebook; repeated here so the snippet is self-contained

    class VAE(gluon.HybridBlock):
        def __init__(self, n_hidden, n_latent, n_layers, n_output, min_vec,
                     max_vec, act_type='relu', **kwargs):
            super(VAE, self).__init__(**kwargs)
            self.soft_zero = 1e-10
            # per-feature min/max vectors are passed as mean/std of the transform;
            # this is the call that later fails inside hybrid_forward()
            self.normalizer = Normalize(mean=min_vec, std=max_vec)
            self.n_latent = n_latent
            self.batch_size = 0
            self.mu = None

            # note to self: requiring batch_size in model definition is sad,
            # not sure how to deal with this otherwise though
            # self.use_aux_logits = use_aux_logits
            # self.normalizer = nn.LayerNorm()
            self.trans = nn.HybridLambda(lambda F, x: x)
           with self.name_scope():
               self.encoder = nn.HybridSequential(prefix='encoder_')
               with self.encoder.name_scope():
                   for i in range(n_layers):
                       self.encoder.add(nn.Dense(n_hidden, activation=act_type))
                   self.encoder.add(nn.Dense(n_latent*2, activation=None))
   
               self.decoder = nn.HybridSequential(prefix='decoder_')
               with self.decoder.name_scope():
                   for i in range(n_layers):
                       self.decoder.add(nn.Dense(n_hidden, activation=act_type))
                   self.decoder.add(nn.Dense(n_output, activation='sigmoid'))
       def forward(self,x):
           self.batch_size = x.shape[0]
           return gluon.HybridBlock.forward(self, x)
       
       def hybrid_forward(self, F, x):
           x_normalized = self.normalizer(x)
           h = self.encoder(x_normalized)
           mu_lv = F.split(h, axis=1, num_outputs=2)
           mu = mu_lv[0]
           lv = mu_lv[1]
           self.mu = mu
           # this would work fine only for nd (i.e. non-hybridized block)
            eps = F.random_normal(loc=0, scale=1, shape=(self.batch_size, self.n_latent), ctx=model_ctx)
           z = mu + F.exp(0.5*lv)*eps
           y = self.decoder(z)
           before_sum = 1+lv-mu*mu-F.exp(lv)
           KL = 0.5*F.nansum(before_sum, axis=1)
           first = x_normalized*F.log(y+self.soft_zero)
           second = (1-x_normalized)*F.log(1-y+self.soft_zero)
           total = first + second
           logloss = F.nansum(total, axis=1)
           loss = -logloss-KL
           return loss, y 
   ```
   Test results:
   ```
   MXNetError                         Traceback (most recent call last)
   <ipython-input-40-cbd52c8ac2cb> in <module>
         2 net.collect_params().initialize(mx.init.Xavier(), ctx=model_ctx)
         3 #net(mx.nd.random.uniform(shape=(128,feature_n), ctx=model_ctx))
    ----> 4 print(net.summary(mx.nd.random.uniform(shape=(1,feature_n), ctx=model_ctx)))
          5 net.hybridize()
          6 trainer = gluon.Trainer(net.collect_params(), 'SGD', {'wd':0.01}) #'learning_rate': .001,
   
    ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in summary(self, *inputs)
       648         try:
       649             self.apply(_register_summary_hook)
   --> 650             self(*inputs)
       651 
       652             line_format = '{:>20}  {:>42} {:>15}'
   
    ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in __call__(self, *args)
       546             hook(self, args)
       547 
   --> 548         out = self.forward(*args)
       549 
       550         for hook in self._forward_hooks.values():
   
   <ipython-input-36-fd3336d944b4> in forward(self, x)
        45     def forward(self,x):
        46         self.batch_size = x.shape[0]
   ---> 47         return gluon.HybridBlock.forward(self, x)
        48 
         49     # https://mxnet.apache.org/api/python/docs/tutorials/extend/custom_layer.html
   
    ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in forward(self, x, *args)
        923                     params = {i: j.data(ctx) for i, j in self._reg_params.items()}
        924 
    --> 925                 return self.hybrid_forward(ndarray, x, *args, **params)
       926 
       927         assert isinstance(x, Symbol), \
   
   <ipython-input-36-fd3336d944b4> in hybrid_forward(self, F, x)
        52         x_ = x.reshape((x.shape[0], x.shape[1], 1))
         53         #x_normalized = F.broadcast_div(F.broadcast_sub(self.flatten(x), self.min_v), (F.broadcast_sub(self.max_v, self.min_v)))
   ---> 54         x_normalized = self.normalizer(x_)
        55         h = self.encoder(x_normalized)
        56         #print(h.asnumpy()[0])
   
    ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in __call__(self, *args)
       546             hook(self, args)
       547 
   --> 548         out = self.forward(*args)
       549 
       550         for hook in self._forward_hooks.values():
   
    ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in forward(self, x, *args)
        923                     params = {i: j.data(ctx) for i, j in self._reg_params.items()}
        924 
    --> 925                 return self.hybrid_forward(ndarray, x, *args, **params)
       926 
        927         assert isinstance(x, Symbol), \
    
    ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/data/vision/transforms.py in hybrid_forward(self, F, x)
       188 
       189     def hybrid_forward(self, F, x):
   --> 190         return F.image.normalize(x, self._mean, self._std)
       191 
       192 
   
    ~/anaconda3/lib/python3.7/site-packages/mxnet/ndarray/register.py in normalize(data, mean, std, out, name, **kwargs)
   
    ~/anaconda3/lib/python3.7/site-packages/mxnet/_ctypes/ndarray.py in _imperative_invoke(handle, ndargs, keys, vals, out)
        90         c_str_array(keys),
        91         c_str_array([str(s) for s in vals]),
   ---> 92         ctypes.byref(out_stypes)))
        93 
        94     if original_output is not None:
   
   ~/anaconda3/lib/python3.7/site-packages/mxnet/base.py in check_call(ret)
       251     """
       252     if ret != 0:
   --> 253         raise MXNetError(py_str(_LIB.MXGetLastError()))
       254 
       255 
   
   MXNetError: Invalid Parameter format for std expect tuple of <float> but 
value='(1.0, 107000000.0, 107000000.0, 74200000.0, 3893.333333, 
3735.7368420000003, 4672.0, 5838440.0, 120000000.0, 120000000.0, 120000000.0, 
84418013.7826341, 3735.7368420000003, 2896.0, 4113.240146, 2000000.0, 1.0, 
65534.0, 156.0, 1.0, 119999998.0, 120000000.0, 120000000.0, 120000000.0, 
84800000.0, inf, 4644908.0, 4644908.0, 120000000.0, 120000000.0, 120000000.0, 
84602929.2769822, 24820.0, 4672.0, 2065.0, 7125.5968458437, 1.0, 120000000.0, 
120000000.0, 76900000.0, 65535.0, 24820.0, 1448.0, 1.0, 3337.142857, 
4414.547151258, 19488226.550680302, 1.0, 1.0, 655453030.0, 291922.0, 
12870338.0, 291922.0, 219759.0, 655453030.0, 1.0, 213557.0, 138.0, 107000000.0, 
120000000.0, 19530.0, 1.0, inf, 120000000.0, 1.0, 3000000.0, 120000000.0, 
65535.0, 219759.0, 12900000.0)', in operator _image_normalize(name="", 
std="(1.0, 107000000.0, 107000000.0, 74200000.0, 3893.333333, 
3735.7368420000003, 4672.0, 5838440.0, 120000000.0, 120000000.0, 120000000.0, 
84418013.7826341, 3735.7368420000003, 2896.0, 4113.240146, 2000000.0, 1.0, 
65534.0, 156.0, 1.0, 119999998.0, 120000000.0, 120000000.0, 120000000.0, 
84800000.0, inf, 4644908.0, 4644908.0, 120000000.0, 120000000.0, 120000000.0, 
84602929.2769822, 24820.0, 4672.0, 2065.0, 7125.5968458437, 1.0, 120000000.0, 
120000000.0, 76900000.0, 65535.0, 24820.0, 1448.0, 1.0, 3337.142857, 
4414.547151258, 19488226.550680302, 1.0, 1.0, 655453030.0, 291922.0, 
12870338.0, 291922.0, 219759.0, 655453030.0, 1.0, 213557.0, 138.0, 107000000.0, 
120000000.0, 19530.0, 1.0, inf, 120000000.0, 1.0, 3000000.0, 120000000.0, 
65535.0, 219759.0, 12900000.0)", mean="(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
-1073741320.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
-4.0, -4.0, -4.0, -14.0, 0.0, -2000000.0, -32212234632.0, -32212234632.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, -536870661.0, 
0.0, 0.0, 0.0, 0.0, -261000000.0, 0.0, 0.0, 0.0, 0.0, -1.0, 1.0, 0.0)")
   ```
   
   The problem lies here:
   ```
    ~/anaconda3/lib/python3.7/site-packages/mxnet/_ctypes/ndarray.py in _imperative_invoke(handle, ndargs, keys, vals, out)
        90         c_str_array(keys),
        91         c_str_array([str(s) for s in vals]),
   ---> 92         ctypes.byref(out_stypes)))
        93 
        94     if original_output is not None:
   ```
   All float values are converted to str here before being handed to the backend, which then fails to parse them back as a tuple of floats and raises the error.
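   As the snippet above shows, _imperative_invoke() serializes every non-NDArray
   keyword value with str() before the C call, so the operator only ever receives
   mean/std as text. A tiny illustration (hypothetical values):
    ```
    # what the frontend hands to c_str_array() for keyword arguments
    vals = [(0.0, 0.0, 0.0), (1.0, float('inf'), 2.0)]   # hypothetical mean / std
    serialized = [str(s) for s in vals]
    print(serialized)   # ['(0.0, 0.0, 0.0)', '(1.0, inf, 2.0)']
    ```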
   
   Issues gathered so far within hybrid_forward() (a workaround sketch that sidesteps the transform follows this list):
   1. calling self.parameters_nd_array failed
   2. calling mx.gluon.Constant failed
   3. calling F.broadcast_sub(self.flatten(x), self.min_v) failed
   4. calling self.params.get('scales_max',
                              shape=max_vec.shape,
                              init=mx.init.Constant(max_vec),
                              differentiable=False) failed
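   The only workaround I can offer is a rough sketch (function name and shapes
   below are mine, assuming min_vec/max_vec are 1-D per-feature vectors): do the
   min-max scaling outside the hybridized block, in the data pipeline, so no tuple
   keyword arguments ever reach a hybridized operator.
    ```
    import mxnet as mx

    def minmax_normalize(x, min_vec, max_vec):
        """Scale each feature of a (batch, features) NDArray into [0, 1]."""
        min_v = mx.nd.array(min_vec).reshape((1, -1))
        max_v = mx.nd.array(max_vec).reshape((1, -1))
        return mx.nd.broadcast_div(mx.nd.broadcast_sub(x, min_v),
                                   mx.nd.broadcast_sub(max_v, min_v))

    x = mx.nd.random.uniform(shape=(128, 70))
    min_vec = [0.0] * 70
    max_vec = [100.0] * 70
    x_norm = minmax_normalize(x, min_vec, max_vec)   # feed x_norm to the VAE instead of x
    ```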
   
   
