yzhliu commented on a change in pull request #6054:
URL: https://github.com/apache/incubator-tvm/pull/6054#discussion_r461254852
##########
File path: tests/python/frontend/mxnet/test_forward.py
##########
@@ -1410,6 +1411,221 @@ def verify(data_shape, axis, use_length, length):
verify((2, 3, 4), 2, True, np.array([[3, 4, 2], [1, 2,
1]]).astype('int32'))
+@pytest.mark.skipif(not hasattr(mx.sym.np, 'pad'), reason="mx.sym.np.pad
hasn't been publish yet")
+@pytest.mark.parametrize(
+ "data_shape, pad_width",
+ [((1,1,3,5),(0,0,0,0,1,2,3,4)), ((1,1,3,5,7),(0,0,0,0,1,2,3,4,5,6))]
+)
+@pytest.mark.parametrize("mode", ["constant", "edge", "reflect"])
+@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32'])
+@pytest.mark.parametrize("constant_value", [0.0, 3.0])
+def test_forward_npi_pad(data_shape, pad_width, mode, dtype, constant_value):
+ data_np = np.random.uniform(size=data_shape).astype(dtype)
+ data = mx.sym.var('data')
+ if mode == 'constant':
+ ref_res = mx.ndarray.pad(mx.nd.array(data_np),
mode=mode,pad_width=pad_width, constant_value=constant_value)
+ mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode,
pad_width=pad_width, constant_values=constant_value)
+ else:
+ ref_res = mx.ndarray.pad(mx.nd.array(data_np),
mode=mode,pad_width=pad_width)
+ mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode,
pad_width=pad_width)
+ mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape},
dtype=dtype)
+ for target, ctx in ctx_list():
+ for kind in ["debug"]:
Review comment:
why is this one using debug only?
##########
File path: tests/python/frontend/mxnet/test_forward.py
##########
@@ -1410,6 +1411,221 @@ def verify(data_shape, axis, use_length, length):
verify((2, 3, 4), 2, True, np.array([[3, 4, 2], [1, 2,
1]]).astype('int32'))
+@pytest.mark.skipif(not hasattr(mx.sym.np, 'pad'), reason="mx.sym.np.pad
hasn't been publish yet")
+@pytest.mark.parametrize(
+ "data_shape, pad_width",
+ [((1,1,3,5),(0,0,0,0,1,2,3,4)), ((1,1,3,5,7),(0,0,0,0,1,2,3,4,5,6))]
+)
+@pytest.mark.parametrize("mode", ["constant", "edge", "reflect"])
+@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32'])
+@pytest.mark.parametrize("constant_value", [0.0, 3.0])
+def test_forward_npi_pad(data_shape, pad_width, mode, dtype, constant_value):
+ data_np = np.random.uniform(size=data_shape).astype(dtype)
+ data = mx.sym.var('data')
+ if mode == 'constant':
+ ref_res = mx.ndarray.pad(mx.nd.array(data_np),
mode=mode,pad_width=pad_width, constant_value=constant_value)
+ mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode,
pad_width=pad_width, constant_values=constant_value)
+ else:
+ ref_res = mx.ndarray.pad(mx.nd.array(data_np),
mode=mode,pad_width=pad_width)
+ mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode,
pad_width=pad_width)
+ mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape},
dtype=dtype)
+ for target, ctx in ctx_list():
+ for kind in ["debug"]:
Review comment:
maybe we can also use `@pytest.mark.parametrize` for ctx_list and kind.
##########
File path: tests/python/frontend/mxnet/test_forward.py
##########
@@ -1410,6 +1411,221 @@ def verify(data_shape, axis, use_length, length):
verify((2, 3, 4), 2, True, np.array([[3, 4, 2], [1, 2,
1]]).astype('int32'))
+@pytest.mark.skipif(not hasattr(mx.sym.np, 'pad'), reason="mx.sym.np.pad
hasn't been publish yet")
+@pytest.mark.parametrize(
+ "data_shape, pad_width",
+ [((1,1,3,5),(0,0,0,0,1,2,3,4)), ((1,1,3,5,7),(0,0,0,0,1,2,3,4,5,6))]
+)
+@pytest.mark.parametrize("mode", ["constant", "edge", "reflect"])
+@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32'])
+@pytest.mark.parametrize("constant_value", [0.0, 3.0])
+def test_forward_npi_pad(data_shape, pad_width, mode, dtype, constant_value):
+ data_np = np.random.uniform(size=data_shape).astype(dtype)
+ data = mx.sym.var('data')
+ if mode == 'constant':
+ ref_res = mx.ndarray.pad(mx.nd.array(data_np),
mode=mode,pad_width=pad_width, constant_value=constant_value)
+ mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode,
pad_width=pad_width, constant_values=constant_value)
+ else:
+ ref_res = mx.ndarray.pad(mx.nd.array(data_np),
mode=mode,pad_width=pad_width)
+ mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode,
pad_width=pad_width)
+ mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape},
dtype=dtype)
+ for target, ctx in ctx_list():
+ for kind in ["debug"]:
+ intrp = relay.create_executor(kind, mod=mod, ctx=ctx,
target=target)
+ op_res = intrp.evaluate()(data_np)
+ tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(),
rtol=1e-5)
+
+
+@pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2)])
+@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32',
'bool'])
+@pytest.mark.parametrize("axes", [(1,0,2),None])
+def test_forward_npi_transpose(data_shape, axes, dtype):
+ def verify(data_shape, axes=None):
+ data_np = np.random.uniform(size=data_shape).astype(dtype)
+ data = mx.sym.var('data')
+ ref_res = mx.np.transpose(mx.np.array(data_np), axes=axes)
+ mx_sym = mx.sym.np.transpose(data.as_np_ndarray(), axes=axes)
+ mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape},
dtype=dtype)
+ for target, ctx in ctx_list():
+ for kind in ["graph", "vm", "debug"]:
+ intrp = relay.create_executor(kind, mod=mod, ctx=ctx,
target=target)
+ op_res = intrp.evaluate()(data_np)
+ tvm.testing.assert_allclose(op_res.asnumpy(),
ref_res.asnumpy(), rtol=1e-5)
+
+
+@pytest.mark.parametrize(
+ "data_shape1, data_shape2, axis",
+
[((2,2),(2,2),1),((2,4),(2,3),1),((1,3,2),(1,3,5),2),((1,3,3),(1,3,3),1),((1,3),(1,3),0)]
+)
+@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32'])
+def test_forward_npi_concatenate(data_shape1, data_shape2, axis, dtype):
+ data_np1 = np.random.uniform(size=data_shape1).astype(dtype)
+ data_np2 = np.random.uniform(size=data_shape2).astype(dtype)
+ data1 = mx.sym.var('data1')
+ data2 = mx.sym.var('data2')
+ ref_res = mx.np.concatenate([mx.np.array(data_np1),
mx.np.array(data_np2)], axis=axis)
+ mx_sym = mx.sym.np.concatenate([data1.as_np_ndarray(),
data2.as_np_ndarray()], axis=axis)
+ mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data1": data_shape1,
"data2": data_shape2}, dtype=dtype)
+ for target, ctx in ctx_list():
+ for kind in ["graph", "vm", "debug"]:
+ intrp = relay.create_executor(kind, mod=mod, ctx=ctx,
target=target)
+ op_res = intrp.evaluate()(data_np1, data_np2)
+ tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(),
rtol=1e-5)
+
+
+@pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2),(2,2,2,1,2,3,1),(1,8)])
+@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32',
'bool'])
+def test_forward_np_copy(data_shape,dtype):
+ data_np = np.random.uniform(size=data_shape).astype(dtype)
+ data = mx.sym.var('data')
+ ref_res = mx.np.copy(mx.np.array(data_np))
+ mx_sym = mx.sym.np.copy(data.as_np_ndarray())
+ mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape},
dtype=dtype)
+ for target, ctx in ctx_list():
+ for kind in ["graph", "vm", "debug"]:
+ intrp = relay.create_executor(kind, mod=mod, ctx=ctx,
target=target)
+ op_res = intrp.evaluate()(data_np)
+ tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(),
rtol=1e-5)
+
+
+@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32',
'bool'])
+def test_forward_npx_reshape(dtype):
+ def verify(data_shape,out_shape,reverse=False):
+ data_np = np.random.uniform(size=data_shape).astype(dtype)
+ data = mx.sym.var('data')
+ ref_res = mx.npx.reshape(mx.np.array(data_np), newshape=out_shape,
reverse=reverse)
+ mx_sym = mx.sym.npx.reshape(data.as_np_ndarray(), newshape=out_shape,
reverse=reverse)
+ mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape},
dtype=dtype)
+ for target, ctx in ctx_list():
+ for kind in ["graph", "vm", "debug"]:
+ intrp = relay.create_executor(kind, mod=mod, ctx=ctx,
target=target)
+ op_res = intrp.evaluate()(data_np)
+ tvm.testing.assert_allclose(op_res.asnumpy(),
ref_res.asnumpy(), rtol=1e-5)
+
+ verify(data_shape=(2, 3, 8), out_shape=(-2, -2, 2, -1))
+ verify(data_shape=(8, 3, 3, 3, 4, 4), out_shape=(-6, 2, -1, -4))
+ verify(data_shape=(8, 3, 3, 3, 4, 4), out_shape=(-5, -4))
+ verify(data_shape=(8, 3, 3, 3, 3, 8), out_shape=(-4, -5), reverse=True)
+ verify(data_shape=(8, 3, 2, 4, 8), out_shape=(-4, -1, 2, -6), reverse=True)
+
+
+@pytest.mark.parametrize("data_shape",
[(2,2,2),(2,7,2),(2,2,2,1,2,3,1),(1,8),(2,2),(1,3)])
+@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32'])
+def test_forward_npi_binary(data_shape,dtype):
+ ref_ops = [mx.np.power, mx.np.multiply, mx.np.add, mx.np.less]
+ mx_ops = [mx.sym.np.power, mx.sym.np.multiply, mx.sym.np.add,
mx.sym.np.less]
+ for i in range(len(ref_ops)):
+ ref_op = ref_ops[i]
+ mx_op = mx_ops[i]
+ # mx.np.power only support float type
+ if ref_op == mx.np.power and dtype not in ['float64', 'float32']:
+ continue
+ data_np1 = np.random.uniform(size=data_shape).astype(dtype)
+ data_np2 = np.random.uniform(size=data_shape).astype(dtype)
+ data1 = mx.sym.var('lhs')
+ data2 = mx.sym.var('rhs')
+ ref_res = ref_op(mx.np.array(data_np1), mx.np.array(data_np2))
+ mx_sym = mx_op(data1.as_np_ndarray(), data2.as_np_ndarray())
+ mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape,
"rhs": data_shape}, dtype=dtype)
+ for target, ctx in ctx_list():
+ for kind in ["graph", "vm", "debug"]:
+ intrp = relay.create_executor(kind, mod=mod, ctx=ctx,
target=target)
+ op_res = intrp.evaluate()(data_np1, data_np2)
+ tvm.testing.assert_allclose(op_res.asnumpy(),
ref_res.asnumpy(), rtol=1e-5)
+
+
+@pytest.mark.parametrize("data_shape",
[(2,2,2),(2,7,2),(2,2,2,1,2,3,1),(1,8),(2,2),(1,3)])
+@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32'])
+@pytest.mark.parametrize("scalar", [1.0,2.0,3.0,4.0])
+def test_forward_npi_binary_scalar(data_shape,dtype,scalar):
+ ref_ops = [mx.np.power, mx.np.multiply, mx.np.add, mx.np.true_divide]
+ mx_ops = [mx.sym.np.power, mx.sym.np.multiply, mx.sym.np.add,
mx.sym.np.true_divide]
+ for i in range(len(ref_ops)):
+ ref_op = ref_ops[i]
+ mx_op = mx_ops[i]
+ # mx.np.power only support float type
+ if ref_op == mx.np.power and dtype not in ['float64', 'float32']:
+ continue
+ data_np1 = np.random.uniform(size=data_shape).astype(dtype)
+ data1 = mx.sym.var('lhs')
+ ref_res = ref_op(mx.np.array(data_np1), scalar)
+ mx_sym = mx_op(data1.as_np_ndarray(), scalar)
+ mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape},
dtype=dtype)
+ for target, ctx in ctx_list():
+ for kind in ["graph", "vm", "debug"]:
+ intrp = relay.create_executor(kind, mod=mod, ctx=ctx,
target=target)
+ op_res = intrp.evaluate()(data_np1)
+ tvm.testing.assert_allclose(op_res.asnumpy(),
ref_res.asnumpy(), rtol=1e-5)
+
+
+@pytest.mark.parametrize("data_shape",
[(2,2,2),(2,7,2),(2,2,2,1,2,3,1),(1,8),(2,2),(1,3)])
+@pytest.mark.parametrize("dtype", ['float64', 'float32'])
+def test_forward_npi_tanh(data_shape,dtype):
+ data_np1 = np.random.uniform(size=data_shape).astype(dtype)
+ data1 = mx.sym.var('data')
+ ref_res = mx.np.tanh(mx.np.array(data_np1))
+ mx_sym = mx.sym.np.tanh(data1.as_np_ndarray())
+ mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data": data_shape},
dtype=dtype)
+ for target, ctx in ctx_list():
+ for kind in ["graph", "vm", "debug"]:
+ intrp = relay.create_executor(kind, mod=mod, ctx=ctx,
target=target)
+ op_res = intrp.evaluate()(data_np1)
+ tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(),
rtol=1e-5)
+
+
+@pytest.mark.skipif(not hasattr(mx.np, 'where'), reason="mx.np.where hasn't
been publish yet")
+@pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2),(1,8),(2,2),(1,3)])
+@pytest.mark.parametrize("cond_dtype", ['float64', 'float32', 'int64',
'int32', 'bool'])
+@pytest.mark.parametrize("data_dtype", ['float64', 'float32', 'int64',
'int32', 'bool'])
+@pytest.mark.parametrize("scalar", [1.0,2.0])
+def test_forward_npi_where_rscalar(data_shape,cond_dtype,data_dtype,scalar):
+ if data_dtype == 'bool':
+ scalar = scalar == 0.0
+ cond_np = np.random.uniform(size=data_shape).astype(cond_dtype)
+ data_np = np.random.uniform(size=data_shape).astype(data_dtype)
+ cond = mx.sym.var('condition')
+ data = mx.sym.var('x')
+ ref_res = mx.np.where(mx.np.array(cond_np), mx.np.array(data_np), scalar)
+ mx_sym = mx.sym.np.where(cond.as_np_ndarray(), data.as_np_ndarray(),
scalar)
+ dtypeDic = {}
+ dtypeDic["condition"] = cond_dtype
+ dtypeDic["x"] = data_dtype
+ mod, _ = relay.frontend.from_mxnet(
+ mx_sym, shape={"condition": data_shape, "x": data_shape},
+ dtype=dtypeDic)
+ for target, ctx in ctx_list():
+ for kind in ["graph", "vm", "debug"]:
+ intrp = relay.create_executor(kind, mod=mod, ctx=ctx,
target=target)
+ op_res = intrp.evaluate()(cond_np, data_np)
+ tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(),
rtol=1e-5)
+
+
+@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32',
'bool'])
+def test_forward_split_v2(dtype):
+ def verify(data_shape, axis=0, indices_or_sections=1, squeeze_axis=False):
+ data_np = np.random.uniform(size=data_shape).astype(dtype)
+ data = mx.sym.var('data')
+ ref_res = mx.ndarray.split_v2(mx.nd.array(data_np),
indices_or_sections, axis=axis, squeeze_axis=squeeze_axis)
+ mx_sym = mx.sym.split_v2(data.as_nd_ndarray(), indices_or_sections,
axis=axis, squeeze_axis=squeeze_axis)
+ mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape},
dtype=dtype)
+ for target, ctx in ctx_list():
+ for kind in ["graph", "vm", "debug"]:
+ intrp = relay.create_executor(kind, mod=mod, ctx=ctx,
target=target)
+ op_res = intrp.evaluate()(data_np)
+ op_res_ = []
+ for arr in op_res:
+ op_res_.append(arr.asnumpy().tolist())
+ ref_res_ = []
+ for arr in ref_res:
+ ref_res_.append(arr.asnumpy().tolist())
+ tvm.testing.assert_allclose(op_res_, ref_res_, rtol=1e-5)
+
+ verify((3, 2, 1), axis=1, indices_or_sections=2)
+ verify((3, 2, 1), axis=0, indices_or_sections=3)
+ verify((3, 2, 1), axis=0, indices_or_sections=3, squeeze_axis=True)
+ verify((3, 2, 1), axis=0, indices_or_sections=(1, 2))
+
+
if __name__ == '__main__':
test_forward_mlp()
Review comment:
remove all the test_xxx function calls and add `pytest.main([__file__])`
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
users@infra.apache.org