haojin2 commented on a change in pull request #15902: Numpy add numpy op roll
URL: https://github.com/apache/incubator-mxnet/pull/15902#discussion_r318875806
##########
File path: tests/python/unittest/test_numpy_op.py
##########
@@ -1126,6 +1126,54 @@ def test_np_randint():
verify_generator(generator=generator_mx_same_seed,
buckets=buckets, probs=probs, nrepeat=100)
+@with_seed()
+@use_np
+def test_np_roll():
+    class TestRoll(HybridBlock):
+        def __init__(self, shift=None, axis=None):
+            super(TestRoll, self).__init__()
+            self._shift = shift
+            self._axis = axis
+
+        def hybrid_forward(self, F, x):
+            return F.np.roll(x, shift=self._shift, axis=self._axis)
+
+    dtypes = ['int32', 'int64', 'float16', 'float32', 'float64']
+    configs = [
+        ((), (3,), None),
+        ((1,), (-3,), None),
+        ((20,), (-3,), None),
+        ((3,), (2,), 0),
+        ((2, 3, 4), (12,), (1,)),
+        ((2, 3, 4), (10, -10), (0, 1)),
+        ((2, 3, 4, 5), (0, 1), (-1, 2)),
+        ((2, 3, 0, 1), (0, 1), (-1, 2)),
+        ((2, 3, 4, 5), 10, (0, 2)),
+    ]
+    for dtype in dtypes:
+        for config in configs:
+            for hybridize in [False, True]:
+                shape, shift, axis = config[0], config[1], config[2]
+                x = rand_ndarray(shape=shape, dtype=dtype).as_np_ndarray()
+                net = TestRoll(shift=shift, axis=axis)
+                np_out = _np.roll(x.asnumpy(), shift=shift, axis=axis)
+                if hybridize:
+                    net.hybridize()
+                x.attach_grad()
+                with mx.autograd.record():
+                    mx_out = net(x)
+                assert mx_out.shape == np_out.shape
+                mx_out.backward()
+                assert same(mx_out.asnumpy(), np_out)
+                assert same(x.grad.shape, x.shape)
+                assert same(x.grad.asnumpy(), _np.ones(shape))
+
+    # test imperativen
Review comment:
Get rid of this line.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services