sxjscience commented on a change in pull request #15768: Fix gradient tensor 
mutation in `{adam/ftrl/rmsprop/rmspropalex}_update`.
URL: https://github.com/apache/incubator-mxnet/pull/15768#discussion_r319195547
 
 

 ##########
 File path: src/operator/optimizer_op-inl.h
 ##########
 @@ -1596,57 +1611,76 @@ struct RMSPropAlexParam : public 
dmlc::Parameter<RMSPropAlexParam> {
   }
 };
 
+struct RMSPropAlexUpdateKernel {
+  template<typename DType>
+  MSHADOW_XINLINE static void Map(int i, DType* out_data,
+    DType* state_n_data, DType* state_g_data, DType* delta_data,
+    const DType* weight_data, const DType* grad_data,
+    const DType clip_gradient, const DType rescale_grad,
+    const DType gamma1, const DType gamma2,
+    const DType lr, const DType wd,
+    const DType clip_weights, const DType epsilon,
+    const OpReqType req) {
+    using namespace mshadow_op;
+
+    const DType rescaled_grad = rescale_grad * grad_data[i] +
+           wd * weight_data[i];
 
 Review comment:
  I find that we can actually simplify the code by adding the following if 
statement here (note that `rescaled_grad` would then need to be declared 
without `const` so it can be reassigned):
  ```c++
  if (clip_gradient >= 0.0f) {
     rescaled_grad = clip::Map(rescaled_grad, clip_gradient);
  }
  ```

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

Reply via email to