MasterJH5574 commented on code in PR #16757:
URL: https://github.com/apache/tvm/pull/16757#discussion_r1536887971


##########
python/tvm/relax/frontend/nn/exporter.py:
##########
@@ -176,35 +183,26 @@ def _unwrap_ret(expr: typing.Any) -> typing.Any:
     def _convert_input(arg):
         if isinstance(arg, tir.Var):
             return rx.Var(arg.name, struct_info=ShapeStructInfo(values=[arg]))
-        if isinstance(arg, (core.Tensor, core.Object)):
+        elif isinstance(arg, (core.Tensor, core.Object)):
             return arg._expr  # pylint: disable=protected-access
-        if isinstance(arg, _spec.Tuple):
+        elif isinstance(arg, _spec.Tuple):
             return rx.Var(
                 arg.name,
                 struct_info=TupleStructInfo(
                     [_convert_input(arg_i).struct_info for arg_i in 
arg.elements]
                 ),
             )
-        raise TypeError(f"Unsupported input type: {type(arg)}")
+        elif isinstance(arg, rx.Expr):
+            return arg
+        else:
+            raise TypeError(f"Unsupported input type: {type(arg)}")
 
     def _params(mode: str) -> typing.List[rx.Var]:
         inputs: typing.List[rx.Var] = []
 
-        def _get_var(shape_var: tir.Var) -> tir.Var:
-            name = shape_var.name
-            if name in str2var_params:
-                return str2var_params[name]

Review Comment:
   Thank you so much, @Lunderberg. I just made a regression test for this 
behavior, and we can add it to `test_frontend_nn_module.py` later on. The test 
essentially expresses in code what I described in prose above. Please let me 
know if the test makes sense to you!
   
   ```python
   def test_linear_dynamic_shape():
       @R.function
       def forward(
           x: R.Tensor((1, 4), dtype="float32"),
           _io: R.Object,
           weight: R.Tensor(("n", 4), dtype="float32"),
           bias: R.Tensor(("n",), dtype="float32"),
       ) -> R.Tuple(R.Tensor((1, "n"), dtype="float32"), R.Tuple(R.Object)):
           n = T.int64()
           R.func_attr({"num_input": 2})
           with R.dataflow():
               permute_dims: R.Tensor((4, n), dtype="float32") = 
R.permute_dims(weight, axes=None)
               matmul: R.Tensor((1, n), dtype="float32") = R.matmul(x, 
permute_dims, out_dtype="void")
               add: R.Tensor((1, n), dtype="float32") = R.add(matmul, bias)
               gv1: R.Tuple(R.Tensor((1, n), dtype="float32"), 
R.Tuple(R.Object)) = add, (_io,)
               R.output(gv1)
           return gv1
   
       mod = modules.Linear(in_features=4, out_features="n", bias=True)
       tvm_mod, _ = mod.export_tvm(spec={"forward": {"x": spec.Tensor((1, 4), 
"float32")}}, debug=True)
       assert_structural_equal(tvm_mod["forward"], forward, True)
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to