wzh99 opened a new pull request, #11728:
URL: https://github.com/apache/tvm/pull/11728

   This PR fixes #11684. I replace `IdentityRel` in `nn.softmax`, 
`nn.fast_softmax` and `nn.log_softmax` with a newly implemented `SoftmaxRel` so 
that the attribute `axis` is checked during type inference. For the test 
case shown in #11684, the following error is reported:
   
   ```
   The axis is not in range [-1, 1)
   Traceback (most recent call last):
     File "/Users/wzh/tvm-bug/bug_softmax_axis.py", line 8, in <module>
       mod = relay.transform.InferType()(mod)
     File "/Users/wzh/tvm-dev/python/tvm/ir/transform.py", line 161, in __call__
       return _ffi_transform_api.RunPass(self, mod)
     File "/Users/wzh/tvm-dev/python/tvm/_ffi/_ctypes/packed_func.py", line 
237, in __call__
       raise get_last_ffi_error()
   tvm._ffi.base.TVMError: Traceback (most recent call last):
     [bt] (8) 9   libtvm.dylib                        0x0000000119ef03b4 
tvm::runtime::PackedFuncObj::Extractor<tvm::runtime::PackedFuncSubObj<void 
tvm::runtime::TypedPackedFunc<tvm::IRModule (tvm::transform::Pass, 
tvm::IRModule)>::AssignTypedLambda<tvm::transform::$_6>(tvm::transform::$_6, 
std::__1::basic_string<char, std::__1::char_traits<char>, 
std::__1::allocator<char> >)::'lambda'(tvm::runtime::TVMArgs const&, 
tvm::runtime::TVMRetValue*)> >::Call(tvm::runtime::PackedFuncObj const*, 
tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*) + 948
     [bt] (7) 8   libtvm.dylib                        0x0000000119ee5964 
tvm::transform::Pass::operator()(tvm::IRModule) const + 148
     [bt] (6) 7   libtvm.dylib                        0x0000000119ee5d71 
tvm::transform::Pass::operator()(tvm::IRModule, tvm::transform::PassContext 
const&) const + 753
     [bt] (5) 6   libtvm.dylib                        0x0000000119ee6873 
tvm::transform::ModulePassNode::operator()(tvm::IRModule, 
tvm::transform::PassContext const&) const + 819
     [bt] (4) 5   libtvm.dylib                        0x000000011b21ddfd 
tvm::runtime::PackedFuncObj::Extractor<tvm::runtime::PackedFuncSubObj<void 
tvm::runtime::TypedPackedFunc<tvm::IRModule (tvm::IRModule, 
tvm::transform::PassContext)>::AssignTypedLambda<tvm::relay::transform::InferType()::$_2>(tvm::relay::transform::InferType()::$_2)::'lambda'(tvm::runtime::TVMArgs
 const&, tvm::runtime::TVMRetValue*)> >::Call(tvm::runtime::PackedFuncObj 
const*, tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*) + 1933
     [bt] (3) 4   libtvm.dylib                        0x000000011b20d217 
tvm::relay::TypeInferencer::Infer(tvm::GlobalVar, tvm::relay::Function) + 135
     [bt] (2) 3   libtvm.dylib                        0x000000011afd2a2f 
tvm::relay::TypeSolver::Solve() + 1615
     [bt] (1) 2   libtvm.dylib                        0x0000000119b86699 
tvm::runtime::detail::LogFatal::Entry::Finalize() + 89
     [bt] (0) 1   libtvm.dylib                        0x000000011b5a3508 
tvm::runtime::Backtrace() + 24
     [bt] (8) 9   libtvm.dylib                        0x000000011b20d217 
tvm::relay::TypeInferencer::Infer(tvm::GlobalVar, tvm::relay::Function) + 135
     [bt] (7) 8   libtvm.dylib                        0x000000011afd285c 
tvm::relay::TypeSolver::Solve() + 1148
     [bt] (6) 7   libtvm.dylib                        0x000000011afd2dd0 
tvm::TypedEnvFunc<bool (tvm::runtime::Array<tvm::Type, void> const&, int, 
tvm::Attrs const&, tvm::TypeReporter 
const&)>::operator()(tvm::runtime::Array<tvm::Type, void> const&, int, 
tvm::Attrs const&, tvm::TypeReporter const&) const + 416
     [bt] (5) 6   libtvm.dylib                        0x000000011a08b154 
tvm::runtime::PackedFuncObj::Extractor<tvm::runtime::PackedFuncSubObj<void 
tvm::runtime::TypedPackedFunc<bool (tvm::runtime::Array<tvm::Type, void> 
const&, int, tvm::Attrs const&, tvm::TypeReporter 
const&)>::AssignTypedLambda<bool (*)(tvm::runtime::Array<tvm::Type, void> 
const&, int, tvm::Attrs const&, tvm::TypeReporter const&)>(bool 
(*)(tvm::runtime::Array<tvm::Type, void> const&, int, tvm::Attrs const&, 
tvm::TypeReporter const&))::'lambda'(tvm::runtime::TVMArgs const&, 
tvm::runtime::TVMRetValue*)> >::Call(tvm::runtime::PackedFuncObj const*, 
tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*) + 20
     [bt] (4) 5   libtvm.dylib                        0x000000011a08b563 void 
tvm::runtime::TypedPackedFunc<bool (tvm::runtime::Array<tvm::Type, void> 
const&, int, tvm::Attrs const&, tvm::TypeReporter 
const&)>::AssignTypedLambda<bool (*)(tvm::runtime::Array<tvm::Type, void> 
const&, int, tvm::Attrs const&, tvm::TypeReporter const&)>(bool 
(*)(tvm::runtime::Array<tvm::Type, void> const&, int, tvm::Attrs const&, 
tvm::TypeReporter const&))::'lambda'(tvm::runtime::TVMArgs const&, 
tvm::runtime::TVMRetValue*)::operator()(tvm::runtime::TVMArgs const&, 
tvm::runtime::TVMRetValue*) const + 1027
     [bt] (3) 4   libtvm.dylib                        0x000000011acd163e 
tvm::relay::SoftmaxRel(tvm::runtime::Array<tvm::Type, void> const&, int, 
tvm::Attrs const&, tvm::TypeReporter const&) + 942
     [bt] (2) 3   libtvm.dylib                        0x0000000119e6a08b 
tvm::DiagnosticContext::Render() + 459
     [bt] (1) 2   libtvm.dylib                        0x0000000119b86699 
tvm::runtime::detail::LogFatal::Entry::Finalize() + 89
     [bt] (0) 1   libtvm.dylib                        0x000000011b5a3508 
tvm::runtime::Backtrace() + 24
     File "/Users/wzh/tvm-dev/src/relay/analysis/type_solver.cc", line 624
   TVMError: 
   ---------------------------------------------------------------
   An error occurred during the execution of TVM.
   For more information, please see: https://tvm.apache.org/docs/errors.html
   ---------------------------------------------------------------
     Check failed: (false) is false: [15:01:35] 
/Users/wzh/tvm-dev/src/ir/diagnostic.cc:105: DiagnosticError: one or more error 
diagnostics were emitted, please check diagnostic render for output.
   ```


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to