This is an automated email from the ASF dual-hosted git repository.
ptrendx pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git
The following commit(s) were added to refs/heads/master by this push:
new dc69b04 [BUGFIX] Fix test_zero_sized_dim save/restore of np_shape state (#20365)
dc69b04 is described below
commit dc69b04070c55f33c1ac2dc83be42be9c1a8c56f
Author: Dick Carter <[email protected]>
AuthorDate: Fri Jun 25 11:15:27 2021 -0700
[BUGFIX] Fix test_zero_sized_dim save/restore of np_shape state (#20365)
* Fix test_zero_sized_dim save/restore of np_shape state
* Trigger CI
---
tests/python/unittest/test_operator.py | 40 +++++++++++++++++-------------
tests/python/unittest/test_thread_local.py | 4 +--
2 files changed, 25 insertions(+), 19 deletions(-)
diff --git a/tests/python/unittest/test_operator.py b/tests/python/unittest/test_operator.py
index 891511b..0e07c37 100644
--- a/tests/python/unittest/test_operator.py
+++ b/tests/python/unittest/test_operator.py
@@ -5333,19 +5333,21 @@ def test_boolean_mask():
assert same(data.grad.asnumpy(), expected_grad)
# test 0-size output
- mx.set_np_shape(True)
- data = mx.nd.array([[1, 2, 3],[4, 5, 6],[7, 8, 9]])
- index = mx.nd.array([0, 0, 0])
- data.attach_grad()
- with mx.autograd.record():
- out = mx.nd.contrib.boolean_mask(data, index)
- out.backward()
- data.grad.wait_to_read()
- expected = np.zeros((0, 3))
- expected_grad = np.array([[0, 0, 0], [0, 0, 0], [0, 0, 0]])
- assert same(out.asnumpy(), expected)
- assert same(data.grad.asnumpy(), expected_grad)
- mx.set_np_shape(False)
+ prev_np_shape = mx.set_np_shape(True)
+ try:
+ data = mx.nd.array([[1, 2, 3],[4, 5, 6],[7, 8, 9]])
+ index = mx.nd.array([0, 0, 0])
+ data.attach_grad()
+ with mx.autograd.record():
+ out = mx.nd.contrib.boolean_mask(data, index)
+ out.backward()
+ data.grad.wait_to_read()
+ expected = np.zeros((0, 3))
+ expected_grad = np.array([[0, 0, 0], [0, 0, 0], [0, 0, 0]])
+ assert same(out.asnumpy(), expected)
+ assert same(data.grad.asnumpy(), expected_grad)
+ finally:
+ mx.set_np_shape(prev_np_shape)
# test gradient
shape = (100, 30)
@@ -9463,7 +9465,8 @@ def test_sldwin_selfatten_operators():
def test_zero_sized_dim():
- mx.util.set_np_shape(True) # Must be done to prevent zero-sized dimension conversion to 'unknown'
+ # Must be done to prevent zero-sized dimension conversion to 'unknown'
+ prev_np_shape = mx.util.set_np_shape(True)
def seq_last():
"""Test for issue:
https://github.com/apache/incubator-mxnet/issues/18938"""
@@ -9483,9 +9486,12 @@ def test_zero_sized_dim():
res = mx.nd.op.SequenceReverse(data)
assert data.shape == res.shape
- seq_last()
- seq_reverse()
- seq_mask()
+ try:
+ seq_last()
+ seq_reverse()
+ seq_mask()
+ finally:
+ mx.util.set_np_shape(prev_np_shape)
@mx.util.use_np
def test_take_grads():
diff --git a/tests/python/unittest/test_thread_local.py b/tests/python/unittest/test_thread_local.py
index 8e4370e..9d1e529 100644
--- a/tests/python/unittest/test_thread_local.py
+++ b/tests/python/unittest/test_thread_local.py
@@ -213,7 +213,7 @@ def test_np_array_scope():
def test_np_global_shape():
- set_np_shape(2)
+ prev_np_shape = set_np_shape(2)
data = []
def f():
@@ -229,4 +229,4 @@ def test_np_global_shape():
assert_almost_equal(data[0].asnumpy(), np.ones(shape=()))
assert_almost_equal(data[1].asnumpy(), np.ones(shape=(0, 1, 2)))
finally:
- set_np_shape(0)
+ set_np_shape(prev_np_shape)