eric-haibin-lin commented on a change in pull request #14830: [WIP] Use env var to enforce safe accumulation in ReduceAxesCompute
URL: https://github.com/apache/incubator-mxnet/pull/14830#discussion_r281773490
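
For context (not part of the PR itself): "safe accumulation" here means running the reduction with a wider accumulator than the input dtype, which the test below toggles via the MXNET_ENFORCE_SAFE_ACCUMULATION environment variable. A minimal, standalone NumPy sketch of the precision problem this guards against (illustrative only; nothing in it is taken from the PR):

```python
import numpy as np

# Summing 100000 float16 ones with a float16 accumulator overflows
# (float16 max is ~65504), while a float32 accumulator is exact.
x = np.ones(100000, dtype=np.float16)
print(x.sum(dtype=np.float16))  # inf      -- unsafe: the accumulator overflows
print(x.sum(dtype=np.float32))  # 100000.0 -- "safe" accumulation in float32
```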
 
 

 ##########
 File path: tests/python/unittest/test_operator.py
 ##########
 @@ -3421,51 +3421,57 @@ def l2norm(input_data, axis=0, keepdims=True):
     epsilon = 1e-3
     acc_type = {np.float16: np.float32, np.float32: np.float32, np.float64: np.float64,
                 np.int32: np.int32, np.int64: np.int64}
+    dtype_to_str = {np.float16: 'float16', np.float32: 'float32', np.float64: 'float64',
+                    np.int32: 'int32', np.int64: 'int64'}
     is_windows = sys.platform.startswith('win')
-    for order in [1, 2]:
-        for dtype in [np.float16, np.float32, np.float64, np.int32, np.int64]:
-            for i in range(in_data_dim):
-                for out_dtype in ['float32', 'float64', 'int32', 'int64']:
-                    if (dtype == np.int32 or dtype == np.int64) and ('int' not in out_dtype or is_windows):
-                        continue
-                    if dtype != np.int32 and dtype != np.int64 and 'int' in out_dtype:
-                        continue
-                    backward_dtype = np.float32 if out_dtype == 'float32' else np.float64
-                    skip_backward = 'int' in out_dtype
-                    print(order, dtype, i, out_dtype, in_shape)
-                    in_data = np.random.uniform(-1, 1, in_shape).astype(acc_type[dtype])
-                    in_data[abs(in_data) < epsilon] = 2 * epsilon
-                    norm_sym = mx.symbol.norm(data=data, ord=order, axis=i, out_dtype=out_dtype, keepdims=True)
-                    npy_out = l1norm(in_data, i) if order is 1 else l2norm(in_data, i)
-                    npy_out_backward = np.sign(in_data) if order is 1 else in_data/npy_out
-                    check_symbolic_forward(norm_sym, [in_data.astype(dtype)], [npy_out.astype(out_dtype)],
-                                           rtol=1e-3, atol=1e-5, ctx=ctx)
-                    if not skip_backward:
-                        check_symbolic_backward(norm_sym, [in_data.astype(dtype)],
-                                                [np.ones(npy_out.shape).astype(out_dtype)],
-                                                [npy_out_backward], rtol=1e-3, atol=1e-5, ctx=ctx,
-                                                dtype=backward_dtype)
-                    # Disable numeric gradient https://github.com/apache/incubator-mxnet/issues/11509
-                    # check gradient
-                    if dtype is not np.float16 and not skip_backward:
-                        check_numeric_gradient(norm_sym, [in_data], numeric_eps=epsilon,
-                                               rtol=1e-1, atol=1e-3, dtype=backward_dtype)
-                    if i < in_data_dim-1:
-                        norm_sym = mx.symbol.norm(data=data, ord=order, axis=(i, i+1), keepdims=True)
-                        npy_out = l1norm(in_data, (i, i+1)) if order is 1 else l2norm(in_data, (i, i+1))
+    for enforce_safe_acc in [True, False]:
+        os.environ["MXNET_ENFORCE_SAFE_ACCUMULATION"] = str(enforce_safe_acc)
+        for order in [1, 2]:
+            for dtype in [np.float16, np.float32, np.float64]:
+                for i in range(in_data_dim):
+                    for out_dtype in ['float32', 'float64']:
+                        backward_dtype = np.float32 if out_dtype == 'float32' else np.float64
+                        accumulation_type = acc_type[dtype]
+                        if not enforce_safe_acc:
+                            backward_dtype = dtype
+                            out_dtype = dtype_to_str[dtype]
+                            accumulation_type = dtype
+                        print(dtype, out_dtype, accumulation_type)
 
 Review comment:
   remove print
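
  If the dtype/out_dtype/accumulation trace is still useful for local debugging, one option (just a suggestion; `log_norm_config` and its arguments are hypothetical names, not part of this PR) is to route it through Python's `logging` module so it stays silent in normal test runs:

```python
import logging

logger = logging.getLogger(__name__)

def log_norm_config(dtype, out_dtype, accumulation_type):
    # Only emitted when DEBUG logging is enabled, unlike a bare print().
    logger.debug("norm test config: dtype=%s out_dtype=%s acc_type=%s",
                 dtype, out_dtype, accumulation_type)
```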
