vicalloy commented on issue #7896:
URL: https://github.com/apache/tvm/issues/7896#issuecomment-824524312
1. I got another error (full traceback below):
```
[21:09:04] /home/hu/tvm/src/relay/transforms/let_list.h:54: Warning: letlist
not used
Traceback (most recent call last):
File "run-tvm.py", line 25, in <module>
exe = relay.vm.compile(mod, target="llvm", target_host=None,
params=params)
File "/home/hu/tvm/python/tvm/relay/backend/vm.py", line 72, in compile
compiler.lower(mod, target)
File "/home/hu/tvm/python/tvm/relay/backend/vm.py", line 143, in lower
self._lower(mod, target, target_host)
File "/home/hu/tvm/python/tvm/_ffi/_ctypes/packed_func.py", line 237, in
__call__
raise get_last_ffi_error()
tvm._ffi.base.TVMError: Traceback (most recent call last):
53: TVMFuncCall
52: std::_Function_handler<void (tvm::runtime::TVMArgs,
tvm::runtime::TVMRetValue*),
tvm::relay::vm::VMCompiler::GetFunction(std::__cxx11::basic_string<char,
std::char_traits<char>, std::allocator<char> > const&,
tvm::runtime::ObjectPtr<tvm::runtime::Object>
const&)::{lambda(tvm::runtime::TVMArgs,
tvm::runtime::TVMRetValue*)#1}>::_M_invoke(std::_Any_data const&,
tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)
51: tvm::relay::vm::VMCompiler::Lower(tvm::IRModule,
tvm::runtime::Map<tvm::Integer, tvm::Target, void, void> const&, tvm::Target
const&)
50: tvm::relay::vm::VMCompiler::OptimizeModule(tvm::IRModule,
tvm::runtime::Map<tvm::Integer, tvm::Target, void, void> const&, tvm::Target
const&)
49: tvm::transform::Pass::operator()(tvm::IRModule) const
....
....
....
3:
tvm::relay::backend::MemoizedExprTranslator<tvm::runtime::Array<tvm::te::Tensor,
void> >::VisitExpr(tvm::RelayExpr const&)
2:
_ZZN3tvm5relay11ExprFunctorIFNS_7runtime5ArrayINS_2te6TensorEvEERKNS_9Relay
1: tvm::relay::MakeShapeFunc::VisitExpr_(tvm::relay::CallNode const*)
0: std::_Function_handler<void (tvm::runtime::TVMArgs,
tvm::runtime::TVMRetValue*),
TVMFuncCreateFromCFunc::{lambda(tvm::runtime::TVMArgs,
tvm::runtime::TVMRetValue*)#2}>::_M_invoke(std::_Any_data const&,
tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&) [clone .cold]
File "/home/hu/tvm/python/tvm/_ffi/_ctypes/packed_func.py", line 81, in
cfun
rv = local_pyfunc(*pyargs)
File "/home/hu/tvm/python/tvm/relay/op/_transform.py", line 1030, in
where_shape_func
bcast_shape = _broadcast_shape_tensors(x_shape, y_shape)
File "<decorator-gen-214>", line 2, in _broadcast_shape_tensors
File "/home/hu/tvm/python/tvm/te/hybrid/__init__.py", line 60, in
wrapped_func
return source_to_op(src, args, func.__globals__, closure_vars)
File "/home/hu/tvm/python/tvm/te/hybrid/parser.py", line 635, in
source_to_op
parser = parse_python(src, args, symbols, closure_vars)
File "/home/hu/tvm/python/tvm/te/hybrid/parser.py", line 605, in
parse_python
parser.parsed_body = parser.visit(root)
File "/usr/lib/python3.8/ast.py", line 363, in visit
return visitor(node)
File "/home/hu/tvm/python/tvm/te/hybrid/parser.py", line 230, in
visit_Module
return self.visit(node.body[0])
File "/usr/lib/python3.8/ast.py", line 363, in visit
return visitor(node)
File "/home/hu/tvm/python/tvm/te/hybrid/parser.py", line 243, in
visit_FunctionDef
res = visit_list_to_block(self.visit, node.body)
File "/home/hu/tvm/python/tvm/te/hybrid/parser.py", line 58, in
visit_list_to_block
lst = [visit(stmt) for stmt in lst if not utils.is_docstring(stmt)]
File "/home/hu/tvm/python/tvm/te/hybrid/parser.py", line 58, in <listcomp>
lst = [visit(stmt) for stmt in lst if not utils.is_docstring(stmt)]
File "/usr/lib/python3.8/ast.py", line 363, in visit
return visitor(node)
File "/home/hu/tvm/python/tvm/te/hybrid/parser.py", line 304, in
visit_Assign
rhs = self.visit(node.value)
File "/usr/lib/python3.8/ast.py", line 363, in visit
return visitor(node)
File "/home/hu/tvm/python/tvm/te/hybrid/parser.py", line 387, in
visit_Subscript
arr = arr[i.value]
File "/home/hu/tvm/python/tvm/ir/container.py", line 36, in __getitem__
return getitem_helper(self, _ffi_api.ArrayGetItem, len(self), idx)
File "/home/hu/tvm/python/tvm/runtime/container.py", line 57, in
getitem_helper
raise IndexError("Index out of range. size: {}, got index
{}".format(length, idx))
IndexError: Index out of range. size: 0, got index 0
```
2. The change I applied to work around it:
```diff
diff --git a/python/tvm/relay/op/_transform.py
b/python/tvm/relay/op/_transform.py
index 16262833d..05632e60a 100644
--- a/python/tvm/relay/op/_transform.py
+++ b/python/tvm/relay/op/_transform.py
@@ -1018,7 +1018,12 @@ def where_shape_func(attrs, inputs, _):
"""
Shape func for where.
"""
- cond_shape = inputs[0]
+ def maybe_expand(tensor):
+ if len(tensor.shape) == 0:
+ return topi.full((1,), "int64", 1)
+ return tensor
+
+ cond_shape = maybe_expand(inputs[0])
x_shape = inputs[1]
y_shape = inputs[2]
diff --git a/python/tvm/relay/op/vision/_vision.py
b/python/tvm/relay/op/vision/_vision.py
index 7a31bce5a..8d6abf1a8 100644
--- a/python/tvm/relay/op/vision/_vision.py
+++ b/python/tvm/relay/op/vision/_vision.py
@@ -94,7 +94,7 @@ def _all_class_nms_shape_func(boxes_shape, scores_shape):
count_shape = output_tensor((1,), "int64")
out_shape[0] = boxes_shape[0] * scores_shape[1] * boxes_shape[1]
- out_shape[1] = 3
+ out_shape[1] = int64(3)
count_shape[0] = int64(1)
return out_shape, count_shape
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]