eric-haibin-lin commented on a change in pull request #14759: Fix softmax behavior to not cast up the accumulator if no output dtype is specified
URL: https://github.com/apache/incubator-mxnet/pull/14759#discussion_r277378902
 
 

 ##########
 File path: src/operator/nn/softmax-inl.h
 ##########
 @@ -442,23 +449,28 @@ void SoftmaxGradCompute(const nnvm::NodeAttrs& attrs,
     param.temperature.value() : 1.0;
   mxnet::TShape shape = AxisShapeCompact(inputs[0].shape_, &axis, true);
 
-  int out_idx = softmax_has_dtype_override(attrs) ? 2 : 1;
-
-  MXNET_REAL_ACC_TYPE_SWITCH(inputs[0].type_flag_, OType, AType, {
-    MSHADOW_REAL_TYPE_SWITCH(outputs[0].type_flag_, DType, {
-      MXNET_ASSIGN_REQ_SWITCH(req[0], Req, {
-        if (shape.ndim() == 2) {
-          SoftmaxGrad<OP1, OP2, Req, negate, AType>(
-              ctx.get_stream<xpu>(), inputs[out_idx].dptr<OType>(),
-              inputs[0].dptr<OType>(), outputs[0].dptr<DType>(),
-              shape.get<2>(), axis, static_cast<DType>(temperature));
-        } else {
-          SoftmaxGrad<OP1, OP2, Req, negate, AType>(
-              ctx.get_stream<xpu>(), inputs[out_idx].dptr<OType>(),
-              inputs[0].dptr<OType>(), outputs[0].dptr<DType>(),
-              shape.get<3>(), axis, static_cast<DType>(temperature));
-        }
-      });
+  int out_idx = softmax_dtype_param(attrs) != -1 ? 2 : 1;
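For context, a minimal sketch of what the helper used above presumably looks like; the body below is an assumption, not part of this hunk. The idea is that it returns the dtype flag from SoftmaxParam, or -1 when no output dtype was specified, so out_idx selects the extra gradient input only when a dtype override is present:

    // Hypothetical sketch, assuming SoftmaxParam carries an optional dtype:
    // returns the requested output dtype flag, or -1 if none was specified.
    inline int softmax_dtype_param(const nnvm::NodeAttrs& attrs) {
      const SoftmaxParam& param = nnvm::get<SoftmaxParam>(attrs.parsed);
      return param.dtype.has_value() ? param.dtype.value() : -1;
    }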
 
 Review comment:
   I'd suggest we add an env var to trigger stable reduction instead of relying on the dtype argument, covering other ops like norm as well.
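As a rough illustration of that suggestion, the stable (widened-accumulator) reduction could be gated on an environment variable rather than on the dtype argument. The variable name MXNET_STABLE_REDUCE and the helper below are hypothetical, a sketch only:

    // Hedged sketch: env var name and helper are assumptions, not MXNet API.
    #include <dmlc/parameter.h>

    inline bool stable_reduce_enabled() {
      // dmlc::GetEnv reads an environment variable with a default value;
      // cache the result, defaulting to disabled (0).
      static const bool enabled =
          dmlc::GetEnv("MXNET_STABLE_REDUCE", 0) != 0;
      return enabled;
    }

Ops such as softmax and norm could then branch on stable_reduce_enabled() to pick a widened accumulator type (e.g. via MXNET_REAL_ACC_TYPE_SWITCH) independently of whether the user overrode the output dtype.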
