areusch opened a new issue #9013:
URL: https://github.com/apache/tvm/issues/9013
I think this might be a compiler caching bug cc @tqchen @jroesch @mbs-octoml.
It was not caught in the regression tests because the regression rebuilds only
changed tutorials.
Steps to reproduce:
1. `git checkout dc2f70e3c8a9b14b9e414ecf768ad32e6c3c3960`
2. `rm -rf build`
3. `docker/bash.sh ci_gpu tests/scripts/task_config_build_gpu.sh`
4. `docker/bash.sh ci_gpu tests/scripts/task_build.sh build -j16`
5. `docker/bash.sh ci_gpu bash -c 'cd docs && make clean'`
6. `docker/bash.sh ci_gpu tests/scripts/task_ci_setup.sh`
7. `docker/bash.sh ci_gpu tests/scripts/task_python_docs.sh`
Will show this traceback somewhere along the way. micro_autotune was just
trying to build a relay model. I _think_ the shapes look correct.
```
conv2d: requires that `0`, the input channels (0) divided by groups (1),
[1324/4555]
must match the input channels of the weight `3`, where the weight shape is
([6, 3, 5, 5]).
The type inference pass was unable to infer a type for this expression.
This usually occurs when an operator call is under constrained in some way,
check other reported errors for hints of what may of happened.
WARNING: /home/areusch/ws/tvm4/tutorials/micro/micro_autotune.py failed to
execute correctly: Traceback (most recent call last):
File "/usr/local/lib/python3.6/dist-packages/sphinx_gallery/gen_rst.py",
line 480, in _memory_usage
out = func()
File "/usr/local/lib/python3.6/dist-packages/sphinx_gallery/gen_rst.py",
line 465, in __call__
exec(self.code, self.globals)
File "/home/areusch/ws/tvm4/tutorials/micro/micro_autotune.py", line 179,
in <module>
lowered = tvm.relay.build(relay_mod, target=TARGET, params=params)
File "../../python/tvm/relay/build_module.py", line 358, in build
mod=ir_mod, target=target, params=params, executor=executor,
mod_name=mod_name
File "../../python/tvm/relay/build_module.py", line 172, in build
self._build(mod, target, target_host, executor, mod_name)
File "tvm/_ffi/_cython/./packed_func.pxi", line 323, in
tvm._ffi._cy3.core.PackedFuncBase.__call__
File "tvm/_ffi/_cython/./packed_func.pxi", line 267, in
tvm._ffi._cy3.core.FuncCall
File "tvm/_ffi/_cython/./base.pxi", line 163, in tvm._ffi._cy3.core.CALL
tvm.error.DiagnosticError: Traceback (most recent call last):
27: TVMFuncCall
at /home/areusch/ws/tvm4/src/runtime/c_runtime_api.cc:474
26: tvm::runtime::PackedFunc::CallPacked(tvm::runtime::TVMArgs,
tvm::runtime::TVMRetValue*) const
at /home/areusch/ws/tvm4/include/tvm/runtime/packed_func.h:1151
25: std::function<void (tvm::runtime::TVMArgs,
tvm::runtime::TVMRetValue*)>::operator()(tvm::runtime::TVMArgs,
tvm::runtime::TVMRetValue*) const
at /usr/include/c++/7/bits/std_function.h:706
24: std::_Function_handler<void (tvm::runtime::TVMArgs,
tvm::runtime::TVMRetValue*),
tvm::relay::backend::RelayBuildModule::GetFunction(std::__cxx11::basic_string<char,
std::char_traits<c
har>, std::allocator<char> > const&,
tvm::runtime::ObjectPtr<tvm::runtime::Object>
const&)::{lambda(tvm::runtime::TVMArgs,
tvm::runtime::TVMRetValue*)#3}>::_M_invoke(std::_Any_data const&,
tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)
at /usr/include/c++/7/bits/std_function.h:316
23:
tvm::relay::backend::RelayBuildModule::GetFunction(std::__cxx11::basic_string<char,
std::char_traits<char>, std::allocator<char> > const&,
tvm::runtime::ObjectPtr<tvm::runtime::Object
> const&)::{lambda(tvm::runtime::TVMArgs,
tvm::runtime::TVMRetValue*)#3}::operator()(tvm::runtime::TVMArgs,
tvm::runtime::TVMRetValue*) const
at /home/areusch/ws/tvm4/src/relay/backend/build_module.cc:181
22: tvm::relay::backend::RelayBuildModule::Build(tvm::IRModule,
tvm::runtime::Map<tvm::Integer, tvm::Target, void, void> const&, tvm::Target
const&, tvm::runtime::String, tvm::runtime::St
ring)
at /home/areusch/ws/tvm4/src/relay/backend/build_module.cc:288
21: tvm::relay::backend::RelayBuildModule::BuildRelay(tvm::IRModule,
std::unordered_map<std::__cxx11::basic_string<char, std::char_traits<char>,
std::allocator<char> >, tvm::runtime::NDAr
ray, std::hash<std::__cxx11::basic_string<char, std::char_traits<char>,
std::allocator<char> > >, std::equal_to<std::__cxx11::basic_string<char,
std::char_traits<char>, std::allocator<char>
> >, std::allocator<std::pair<std::__cxx11::basic_string<char,
std::char_traits<char>, std::allocator<char> > const, tvm::runtime::NDArray> >
> const&, tvm::runtime::String)
at /home/areusch/ws/tvm4/src/relay/backend/build_module.cc:479
20: tvm::relay::backend::RelayBuildModule::Optimize(tvm::IRModule,
tvm::runtime::Map<tvm::Integer, tvm::Target, void, void> const&,
std::unordered_map<std::__cxx11::basic_string<char, std
::char_traits<char>, std::allocator<char> >, tvm::runtime::NDArray,
std::hash<std::__cxx11::basic_string<char, std::char_traits<char>,
std::allocator<char> > >, std::equal_to<std::__cxx11::
basic_string<char, std::char_traits<char>, std::allocator<char> > >,
std::allocator<std::pair<std::__cxx11::basic_string<char,
std::char_traits<char>, std::allocator<char> > const, tvm::run
time::NDArray> > > const&)
at /home/areusch/ws/tvm4/src/relay/backend/build_module.cc:329
19: tvm::transform::Pass::operator()(tvm::IRModule) const
at /home/areusch/ws/tvm4/src/ir/transform.cc:255
18: tvm::transform::Pass::operator()(tvm::IRModule,
tvm::transform::PassContext const&) const
at /home/areusch/ws/tvm4/src/ir/transform.cc:267
17: tvm::transform::SequentialNode::operator()(tvm::IRModule,
tvm::transform::PassContext const&) const
at /home/areusch/ws/tvm4/src/ir/transform.cc:481
16: tvm::transform::Pass::operator()(tvm::IRModule,
tvm::transform::PassContext const&) const
at /home/areusch/ws/tvm4/src/ir/transform.cc:267
15: tvm::relay::transform::FunctionPassNode::operator()(tvm::IRModule,
tvm::transform::PassContext const&) const
at /home/areusch/ws/tvm4/src/relay/ir/transform.cc:160
14: tvm::transform::Pass::operator()(tvm::IRModule) const
at /home/areusch/ws/tvm4/src/ir/transform.cc:255
13: tvm::transform::Pass::operator()(tvm::IRModule,
tvm::transform::PassContext const&) const
at /home/areusch/ws/tvm4/src/ir/transform.cc:267
12: tvm::transform::ModulePassNode::operator()(tvm::IRModule,
tvm::transform::PassContext const&) const
at /home/areusch/ws/tvm4/src/ir/transform.cc:415
11: tvm::runtime::TypedPackedFunc<tvm::IRModule (tvm::IRModule,
tvm::transform::PassContext)>::operator()(tvm::IRModule,
tvm::transform::PassContext) const
at /home/areusch/ws/tvm4/include/tvm/runtime/packed_func.h:1498
10: tvm::IRModule
tvm::runtime::detail::typed_packed_call_dispatcher<tvm::IRModule>::run<tvm::IRModule,
tvm::transform::PassContext>(tvm::runtime::PackedFunc const&, tvm::IRModule&&,
tvm:
:transform::PassContext&&)
at /home/areusch/ws/tvm4/include/tvm/runtime/packed_func.h:1444
9: tvm::runtime::TVMRetValue
tvm::runtime::PackedFunc::operator()<tvm::IRModule,
tvm::transform::PassContext>(tvm::IRModule&&, tvm::transform::PassContext&&)
const
at /home/areusch/ws/tvm4/include/tvm/runtime/packed_func.h:1369
8: std::function<void (tvm::runtime::TVMArgs,
tvm::runtime::TVMRetValue*)>::operator()(tvm::runtime::TVMArgs,
tvm::runtime::TVMRetValue*) const
at /usr/include/c++/7/bits/std_function.h:706
7: _M_invoke
at /usr/include/c++/7/bits/std_function.h:316
6: operator()
at /home/areusch/ws/tvm4/include/tvm/runtime/packed_func.h:1492
5: unpack_call<tvm::IRModule, 2,
tvm::relay::transform::InferType()::<lambda(tvm::IRModule, const PassContext&)>
>
at /home/areusch/ws/tvm4/include/tvm/runtime/packed_func.h:1421
4: run<>
at /home/areusch/ws/tvm4/include/tvm/runtime/packed_func.h:1382
3: run<tvm::runtime::TVMMovableArgValueWithContext_>
at /home/areusch/ws/tvm4/include/tvm/runtime/packed_func.h:1382
2: run<tvm::runtime::TVMMovableArgValueWithContext_,
tvm::runtime::TVMMovableArgValueWithContext_>
at /home/areusch/ws/tvm4/include/tvm/runtime/packed_func.h:1397
1: operator()
at /home/areusch/ws/tvm4/src/relay/transforms/type_infer.cc:857
0: tvm::DiagnosticContext::Render()
at /home/areusch/ws/tvm4/src/ir/diagnostic.cc:105
File "/home/areusch/ws/tvm4/src/ir/diagnostic.cc", line 105
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]