chinakook opened a new issue #8109: MakeLoss with CustomOp is useless.
URL: https://github.com/apache/incubator-mxnet/issues/8109
 
 
   MakeLoss has no effect when applied to a CustomOp: even with grad_scale=0 the CustomOp's gradient is passed through unchanged, as the example below shows.
   ```python
   import numpy as np
   import mxnet as mx
   
   class Weightsoftmax(mx.operator.CustomOp):
       def __init__(self):
           super(Weightsoftmax, self).__init__()
       def forward(self, is_train, req, in_data, out_data, aux):
           x = in_data[0].asnumpy()        
           
           y = np.exp(x - x.max(axis=1))
           y /= y.sum(axis=1)
                 
           self.assign(out_data[0], req[0], mx.nd.array(y))
           
       def backward(self, req, out_grad, in_data, out_data, in_grad, aux):
           prob = out_data[0].asnumpy()
   
           weight = np.array([1,1.])
           f_label = in_data[1].asnumpy()
           f_label = f_label.astype(np.float32)
           label = np.concatenate((1-f_label,f_label),axis=1)
           
           out = prob - label
   
           out[:,0,:,:] *= weight[0]
           out[:,1,:,:] *= weight[1]
           #out/=(out.shape[2]*out.shape[3])
           
           self.assign(in_grad[0], req[0], mx.nd.array(out))
   @mx.operator.register("weightsoftmax")
   class WeightsoftmaxProp(mx.operator.CustomOpProp):
       def __init__(self):
           super(WeightsoftmaxProp, self).__init__(need_top_grad=False)
   
       def list_arguments(self):
           return ['indata', 'label']
   
       def list_outputs(self):
           return ['output']
   
       def infer_shape(self, in_shapes):
            
           data_shape = in_shapes[0]
           label_shape = in_shapes[1]
           output_shape = data_shape
           return [data_shape,label_shape], [output_shape],[]
   
       def create_operator(self, ctx, shapes, dtypes):
           return Weightsoftmax()
   
   x = mx.nd.array([[[[1,2,3],[4,5,6],[7,8,9]],[[2,3,4],[5,2,3],[3,4,2]]]])
   l = mx.nd.array([[[[1,0,1],[0,0,1],[1,1,0]]]])
   vm=mx.sym.Variable('m')
   vn=mx.sym.Variable('n')
   dx = mx.nd.empty(x.shape)
   out=mx.symbol.Custom(indata = vm ,label = vn, op_type='weightsoftmax', name 
= "loss_t" )
   out = mx.symbol.MakeLoss(name="myloss",data=out, grad_scale=0 
,normalization='null')
   exec_ = out.bind(ctx=mx.cpu(), args={'m':x, 'n':l}, args_grad={'m': dx})
   exec_.forward()
   print(exec_.outputs[0].asnumpy())
   exec_.backward(out_grads=mx.nd.ones_like(dx))
   print(exec_.grad_arrays)
   
   [[[[  2.68941432e-01   2.68941432e-01   2.68941432e-01]
      [  2.68941432e-01   9.52574134e-01   9.52574134e-01]
      [  9.82013762e-01   9.82013762e-01   9.99089003e-01]]
   
     [[  7.31058598e-01   7.31058598e-01   7.31058598e-01]
      [  7.31058598e-01   4.74258736e-02   4.74258736e-02]
      [  1.79862101e-02   1.79862101e-02   9.11051233e-04]]]]
   [
   [[[[  2.98823807e-02  -8.12287331e-02   2.98823807e-02]
      [ -8.12287331e-02  -5.26954047e-03   1.05841570e-01]
      [  1.09112643e-01   1.09112643e-01  -1.01221929e-04]]
   
     [[ -2.98823789e-02   8.12287331e-02  -2.98823789e-02]
      [  8.12287331e-02   5.26954141e-03  -1.05841570e-01]
      [ -1.09112643e-01  -1.09112643e-01   1.01227917e-04]]]]
   <NDArray 1x2x3x3 @cpu(0)>, None]
 
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

Reply via email to