mehrdadh commented on issue #9216:
URL: https://github.com/apache/tvm/issues/9216#issuecomment-937985447


   I tried to reproduce this. I tested the micro_tflite tutorial on a microTVM RVM 
and got this traceback:
   ```
   Model Version: 3
   Traceback (most recent call last):
     File "tutorials/micro/micro_tflite.py", line 211, in <module>
       module = relay.build(mod, target=TARGET, params=params)
     File 
"/home/mhessar/mlperftiny/3rdparty/tvm/python/tvm/relay/build_module.py", line 
358, in build
       mod=ir_mod, target=target, params=params, executor=executor, 
mod_name=mod_name
     File 
"/home/mhessar/mlperftiny/3rdparty/tvm/python/tvm/relay/build_module.py", line 
172, in build
       self._build(mod, target, target_host, executor, mod_name)
     File 
"/home/mhessar/mlperftiny/3rdparty/tvm/python/tvm/_ffi/_ctypes/packed_func.py", 
line 237, in __call__
       raise get_last_ffi_error()
   tvm._ffi.base.TVMError: Traceback (most recent call last):
     8: TVMFuncCall
     7: _ZNSt17_Function_handlerIFvN
     6: 
tvm::relay::backend::RelayBuildModule::GetFunction(std::__cxx11::basic_string<char,
 std::char_traits<char>, std::allocator<char> > const&, 
tvm::runtime::ObjectPtr<tvm::runtime::Object> 
const&)::{lambda(tvm::runtime::TVMArgs, 
tvm::runtime::TVMRetValue*)#3}::operator()(tvm::runtime::TVMArgs, 
tvm::runtime::TVMRetValue*) const
     5: tvm::relay::backend::RelayBuildModule::BuildRelay(tvm::IRModule, 
std::unordered_map<std::__cxx11::basic_string<char, std::char_traits<char>, 
std::allocator<char> >, tvm::runtime::NDArray, 
std::hash<std::__cxx11::basic_string<char, std::char_traits<char>, 
std::allocator<char> > >, std::equal_to<std::__cxx11::basic_string<char, 
std::char_traits<char>, std::allocator<char> > >, 
std::allocator<std::pair<std::__cxx11::basic_string<char, 
std::char_traits<char>, std::allocator<char> > const, tvm::runtime::NDArray> > 
> const&, tvm::runtime::String)
     4: std::_Function_handler<void (tvm::runtime::TVMArgs, 
tvm::runtime::TVMRetValue*), 
tvm::relay::backend::GraphExecutorCodegenModule::GetFunction(std::__cxx11::basic_string<char,
 std::char_traits<char>, std::allocator<char> > const&, 
tvm::runtime::ObjectPtr<tvm::runtime::Object> 
const&)::{lambda(tvm::runtime::TVMArgs, 
tvm::runtime::TVMRetValue*)#2}>::_M_invoke(std::_Any_data const&, 
tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)
     3: 
tvm::relay::backend::GraphExecutorCodegen::Codegen(tvm::relay::Function, 
tvm::runtime::String)
     2: 
tvm::relay::backend::MemoizedExprTranslator<std::vector<tvm::relay::backend::GraphNodeRef,
 std::allocator<tvm::relay::backend::GraphNodeRef> > 
>::VisitExpr(tvm::RelayExpr const&)
     1: _ZZN3tvm5relay11ExprFunctorIFSt6vectorINS0_7backend12GraphNodeRefESaI
     0: 
tvm::relay::backend::GraphExecutorCodegen::VisitExpr_(tvm::relay::CallNode 
const*)
     File 
"/home/mhessar/mlperftiny/3rdparty/tvm/src/relay/backend/graph_executor_codegen.cc",
 line 456
   TVMError:
   ---------------------------------------------------------------
   An error occurred during the execution of TVM.
   For more information, please see: https://tvm.apache.org/docs/errors.html
   ---------------------------------------------------------------
     Check failed: (global_node) is false: Non-primitive-call nodes should have 
been transformed away.
   The graph executor code generator expects all calls to have their callee 
normalized to a GlobalVar, but found:
   free_var %dense_4_input: Tensor[(1), float32];
   %0 = reshape(%dense_4_input, newshape=[-1, 1]) /* ty=Tensor[(1, 1), float32] 
*/;
   %1 = nn.dense(%0, meta[relay.Constant][0] /* ty=Tensor[(16, 1), float32] */, 
units=16) /* ty=Tensor[(1, 16), float32] */;
   %2 = add(%1, meta[relay.Constant][1] /* ty=Tensor[(16), float32] */) /* 
ty=Tensor[(1, 16), float32] */;
   %3 = nn.relu(%2) /* ty=Tensor[(1, 16), float32] */;
   %4 = reshape(%3, newshape=[-1, 16]) /* ty=Tensor[(1, 16), float32] */;
   %5 = nn.dense(%4, meta[relay.Constant][2] /* ty=Tensor[(16, 16), float32] 
*/, units=16) /* ty=Tensor[(1, 16), float32] */;
   %6 = add(%5, meta[relay.Constant][3] /* ty=Tensor[(16), float32] */) /* 
ty=Tensor[(1, 16), float32] */;
   %7 = nn.relu(%6) /* ty=Tensor[(1, 16), float32] */;
   %8 = reshape(%7, newshape=[-1, 16]) /* ty=Tensor[(1, 16), float32] */;
   %9 = nn.dense(%8, meta[relay.Constant][4] /* ty=Tensor[(1, 16), float32] */, 
units=1) /* ty=Tensor[(1, 1), float32] */;
   add(%9, meta[relay.Constant][5] /* ty=Tensor[(1), float32] */) /* 
ty=Tensor[(1, 1), float32] */
   ```


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to