access2rohit commented on a change in pull request #19059:
URL: https://github.com/apache/incubator-mxnet/pull/19059#discussion_r485261131



##########
File path: tests/nightly/test_np_large_array.py
##########
@@ -101,30 +101,32 @@ def test_zeros():
     assert A[0][0] == 0
 
 @use_np
-def test_abs():
-    A = np.ones((INT_OVERFLOW, 2))
-    A[0][0] = -1
-    A.attach_grad()
-    with mx.autograd.record():
-        B = np.abs(A)
-    assert B.shape == (INT_OVERFLOW, 2)
-    assert B[0][0] == 1
-    B.backward()
-    assert A.grad.shape == (INT_OVERFLOW, 2)
-    assert A.grad[0][0] == -1
+def test_ones_like():
+    A = np.ones((2, INT_OVERFLOW))
+    B = np.ones_like(A)
+    assert B.shape == A.shape
+    assert B[0, 0] == 1 and B[-1, -1] == 1
 
 @use_np
-def test_absolute():
+def test_zeros_like():
     A = np.ones((INT_OVERFLOW, 2))
-    A[0][0] = -1
+    B = np.zeros_like(A)
+    assert B.shape == A.shape
+    assert B[0, 0] == 0 and B[-1, -1] == 0
+
+@use_np
+def test_abs():
+    # abs absolute and fabs are the same thing
+    A = np.zeros((INT_OVERFLOW, 2))
+    A[-1, -1] = -1
     A.attach_grad()
     with mx.autograd.record():
-        B = np.absolute(A)
+        B = np.abs(A)
     assert B.shape == (INT_OVERFLOW, 2)
-    assert B[0][0] == 1
+    assert B[-1, -1] == 1
     B.backward()

Review comment:
      Can you move it inside `autograd.record()`?




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to