This is an automated email from the ASF dual-hosted git repository.

kparzysz pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tvm.git


The following commit(s) were added to refs/heads/main by this push:
     new e2e1696c74 [Relay] Handle pad value coming from Tensor instead of scalar (#14735)
e2e1696c74 is described below

commit e2e1696c742957d2ac3b508b438a82c85e141794
Author: Krzysztof Parzyszek <[email protected]>
AuthorDate: Fri Apr 28 16:57:18 2023 -0500

    [Relay] Handle pad value coming from Tensor instead of scalar (#14735)
    
    * [Relay] Handle pad value coming from Tensor instead of scalar
    
    The PadCompute function would pass empty index to obtain the pad value.
    This caused a crash when the pad value was given in a tensor with the
    following message:
    
        Check failed: shape.size() == indices.size() (1 vs. 0)
          : Tensor dimension mismatch in read ndim = 1, indices.size=0
    
    * Move test to tests/python/relay/test_op_level2.py
---
 src/relay/op/nn/pad.cc               |  2 +-
 tests/python/relay/test_op_level2.py | 19 +++++++++++++++++++
 2 files changed, 20 insertions(+), 1 deletion(-)

diff --git a/src/relay/op/nn/pad.cc b/src/relay/op/nn/pad.cc
index 365873d2fd..8cfb369901 100644
--- a/src/relay/op/nn/pad.cc
+++ b/src/relay/op/nn/pad.cc
@@ -177,7 +177,7 @@ Array<te::Tensor> PadCompute(const Attrs& attrs, const Array<te::Tensor>& inputs
     pad_after.push_back(pad_width[i][1]);
   }
   te::Tensor cast_pad_value = topi::cast(inputs[1], inputs[0]->dtype);
-  const PrimExpr& pad_value = cast_pad_value(Array<PrimExpr>());
+  const PrimExpr& pad_value = cast_pad_value(Array<PrimExpr>(inputs[1]->shape.size(), 0));
   return Array<te::Tensor>{topi::pad(inputs[0], pad_before, pad_after, pad_value, "T_pad",
                                      topi::kElementWise, param->pad_mode)};
 }
diff --git a/tests/python/relay/test_op_level2.py b/tests/python/relay/test_op_level2.py
index 434b4fa0a0..0a0ae561ab 100644
--- a/tests/python/relay/test_op_level2.py
+++ b/tests/python/relay/test_op_level2.py
@@ -1444,6 +1444,25 @@ def test_pad_run_dynamic_pad_value():
     _test_run("int32")
 
 
+def test_pad_value_in_array():
+    A = relay.var("A", shape=(32, 32), dtype="int8")
+
+    # Extract pad value from an array
+    p0 = relay.Constant(tvm.nd.array(np.array([2], dtype="int8")))
+    p1 = relay.nn.pad(A, pad_value=p0, pad_width=((1, 1), (1, 1)))
+
+    func = relay.Function(relay.analysis.free_vars(p1), p1)
+    mod = tvm.IRModule.from_expr(func)
+
+    target = "llvm"
+    lib = relay.build(
+        mod,
+        tvm.target.Target(target, host=target),
+        runtime=relay.backend.Runtime("cpp"),
+        executor=relay.backend.Executor("aot", {"unpacked-api": False, "interface-api": "packed"}),
+    )
+
+
 @tvm.testing.uses_gpu
 @pytest.mark.parametrize("dtype", ["float32", "float16"])
 def test_lrn(executor_kind, dtype):

Reply via email to