@wkcn 
我用的是 mxnet-cu80。这是我定义的 softmax 层：当 label 为全 0 时回传 0 梯度。目的是用 MXNet 的 module 做多任务训练，但每个任务单独训练——因为每条数据只带有其中一个任务的标签。
```
class Softmax(mx.operator.CustomOp):
  """Softmax with cross-entropy gradient for multi-task training.

  When a batch's labels are all zero, the sample is treated as carrying
  no annotation for this task and a zero gradient is back-propagated, so
  only the task that owns the labels updates its parameters.
  """

  def forward(self, is_train, req, in_data, out_data, aux):
      x = in_data[0]
      # Numerically stable softmax: subtract the row-wise max before exp.
      y = mx.nd.exp(x - x.max(axis=1).reshape((x.shape[0], 1)))
      y = y / y.sum(axis=1).reshape((x.shape[0], 1))
      # y is already an NDArray; the original wrapped it in mx.nd.array,
      # forcing a needless copy.
      self.assign(out_data[0], req[0], y)

  def backward(self, req, out_grad, in_data, out_data, in_grad, aux):
      label = in_data[1].astype('int32')
      y = out_data[0]
      # All-zero labels mark "no label for this task": emit zeros so the
      # other task's head is untouched. asscalar() yields a plain Python
      # number (the original compared a 1-element numpy array to 0).
      if label.sum().asscalar() == 0:
          self.assign(in_grad[0], req[0], mx.nd.zeros_like(y))
          return
      # Compute grad = softmax(x) - one_hot(label) on a host-side copy:
      # the original subtracted from out_data[0] in place (corrupting the
      # stored forward output) and mixed numpy/NDArray fancy indexing,
      # which is unreliable on mxnet-cu80.
      grad = y.asnumpy()
      rows = np.arange(label.shape[0])
      grad[rows, label.asnumpy()] -= 1.0
      # Average over the batch. The original hard-coded 160, presumably
      # the batch size in use at the time — derive it from the data so
      # any batch size works.
      grad /= grad.shape[0]
      self.assign(in_grad[0], req[0], mx.nd.array(grad, ctx=y.context))

@mx.operator.register("softmax")
class SoftmaxProp(mx.operator.CustomOpProp):
  """Registration/property class for the custom softmax operator.

  Declares two inputs ('data', 'label') and a single output whose shape
  and dtype mirror the data input.
  """

  def __init__(self):
      # This is a loss layer that computes its own gradient, so no
      # gradient from above is required.
      super(SoftmaxProp, self).__init__(need_top_grad=False)

  def list_arguments(self):
      return ['data', 'label']

  def list_outputs(self):
      return ['output']

  def infer_shape(self, in_shape):
      data_shape = in_shape[0]
      # One scalar class index per sample in the batch.
      label_shape = (data_shape[0],)
      # Softmax preserves the shape of its input.
      return [data_shape, label_shape], [data_shape], []

  def infer_type(self, in_type):
      # The output dtype follows the data input's dtype.
      return in_type, [in_type[0]], []

  def create_operator(self, ctx, shapes, dtypes):
      return Softmax()

```


[ Full content available at: 
https://github.com/apache/incubator-mxnet/issues/9920 ]
This message was relayed via gitbox.apache.org for [email protected]

Reply via email to