Cookiee235 opened a new issue, #17210:
URL: https://github.com/apache/tvm/issues/17210
### Actual behavior
```
Traceback (most recent call last):
File "/share_container/optfuzz/res/bugs/3_simple.py", line 36, in <module>
mod = relax.transform.MergeCompositeFunctions()(mod) # crash here
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/software/tvm-lunder/python/tvm/ir/transform.py", line 238, in
__call__
return _ffi_transform_api.RunPass(self, mod)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/software/tvm-lunder/python/tvm/_ffi/_ctypes/packed_func.py", line
240, in __call__
raise_last_ffi_error()
File "/software/tvm-lunder/python/tvm/_ffi/base.py", line 481, in
raise_last_ffi_error
raise py_err
tvm.error.InternalError: Traceback (most recent call last):
17:
tvm::runtime::PackedFuncObj::Extractor<tvm::runtime::PackedFuncSubObj<tvm::runtime::TypedPackedFunc<tvm::IRModule
(tvm::transform::Pass,
tvm::IRModule)>::AssignTypedLambda<tvm::transform::{lambda(tvm::transform::Pass,
tvm::IRModule)#7}>(tvm::transform::{lambda(tvm::transform::Pass,
tvm::IRModule)#7}, std::__cxx11::basic_string<char, std::char_traits<char>,
std::allocator<char> >)::{lambda(tvm::runtime::TVMArgs const&,
tvm::runtime::TVMRetValue*)#1}> >::Call(tvm::runtime::PackedFuncObj const*,
tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)
16: tvm::transform::Pass::operator()(tvm::IRModule) const
15: tvm::transform::Pass::operator()(tvm::IRModule,
tvm::transform::PassContext const&) const
14: tvm::transform::ModulePassNode::operator()(tvm::IRModule,
tvm::transform::PassContext const&) const
13:
tvm::runtime::PackedFuncObj::Extractor<tvm::runtime::PackedFuncSubObj<tvm::runtime::TypedPackedFunc<tvm::IRModule
(tvm::IRModule,
tvm::transform::PassContext)>::AssignTypedLambda<tvm::relax::transform::MergeCompositeFunctions()::{lambda(tvm::IRModule,
tvm::transform::PassContext)#1}>(tvm::relax::transform::MergeCompositeFunctions()::{lambda(tvm::IRModule,
tvm::transform::PassContext)#1})::{lambda(tvm::runtime::TVMArgs const&,
tvm::runtime::TVMRetValue*)#1}> >::Call(tvm::runtime::PackedFuncObj const*,
tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)
12: tvm::relax::MergeCompositeFunctions(tvm::IRModule)
11: tvm::relax::MakeGroupedFunctions(tvm::IRModule,
std::unordered_map<tvm::runtime::Object const*,
tvm::relay::GraphPartitioner::Group*, std::hash<tvm::runtime::Object const*>,
std::equal_to<tvm::runtime::Object const*>,
std::allocator<std::pair<tvm::runtime::Object const* const,
tvm::relay::GraphPartitioner::Group*> > > const&, bool,
tvm::runtime::Array<tvm::runtime::String, void> const&)
10:
tvm::relax::OperatorFusor::Transform(tvm::runtime::Array<tvm::runtime::String,
void> const&)
9: tvm::relax::ExprMutator::VisitExpr(tvm::RelayExpr const&)
8:
_ZZN3tvm5relax11ExprFunctorIFNS_9RelayExprERKS2_EE10InitVTableEvENUlRKNS_7r
7: tvm::relax::ExprMutator::VisitExpr_(tvm::relax::FunctionNode const*)
6: tvm::relax::ExprMutator::VisitWithNewScope(tvm::RelayExpr const&,
tvm::runtime::Optional<tvm::runtime::Array<tvm::relax::Var, void> >)
5: tvm::relax::ExprMutator::VisitExpr(tvm::RelayExpr const&)
4:
_ZZN3tvm5relax11ExprFunctorIFNS_9RelayExprERKS2_EE10InitVTableEvENUlRKNS_7r
3: tvm::relax::ExprMutator::VisitExpr_(tvm::relax::SeqExprNode const*)
2: tvm::relax::ExprMutator::VisitBindingBlock(tvm::relax::BindingBlock
const&)
1:
tvm::relax::OperatorFusor::VisitBindingBlock_(tvm::relax::DataflowBlockNode
const*)
0:
tvm::relax::OperatorFusor::CollectFuncBindings(tvm::runtime::Array<tvm::relax::Binding,
void> const&)
File "/software/tvm-lunder/src/relax/transform/fuse_ops.cc", line 941
InternalError: Check failed: (it_group != obj2group_.end()) is false:
Variable gv could not be found in any group
```
### Environment
Any environment details, such as Operating System, TVM version, etc., were not specified in the report.
### Steps to reproduce
```
from tvm import relax
from tvm.script import ir as I
from tvm.script import relax as R
# Reproduction module, reconstructed with proper indentation (the issue text
# was hard-wrapped by email formatting, which made the snippet a SyntaxError).
# It contains one composite ("Primitive") function that is called from two
# separate entry functions — presumably the double reference is what trips
# MergeCompositeFunctions; TODO confirm against the pass implementation.
@I.ir_module
class Module:
    @R.function(private=True)
    def fused_relax_nn_conv2d_relax_nn_relu(
        data1: R.Tensor((1, 64, 56, 56), dtype="float32"),
        weight11: R.Tensor((64, 64, 3, 3), dtype="float32"),
    ) -> R.Tensor((1, 64, 56, 56), dtype="float32"):
        # Marked as a composite for the "dnnl" backend pattern "conv2d_relu".
        R.func_attr({"Composite": "dnnl.conv2d_relu", "Primitive": 1})
        cls = Module
        with R.dataflow():
            lv1 = R.nn.conv2d(data1, weight11)
            gv1 = R.nn.relu(lv1)
            R.output(gv1)
        return gv1

    # First caller of the composite function. Note: weight2 is accepted but
    # unused, and the declared return shape (54x54) differs from the bound
    # value's shape (56x56) — kept as-is since this is the original repro.
    @R.function(private=False)
    def main2(
        data: R.Tensor((1, 64, 56, 56), dtype="float32"),
        weight1: R.Tensor((64, 64, 3, 3), dtype="float32"),
        weight2: R.Tensor((64, 64, 3, 3), dtype="float32"),
    ) -> R.Tensor((1, 64, 54, 54), dtype="float32"):
        cls = Module
        with R.dataflow():
            gv: R.Tensor((1, 64, 56, 56), dtype="float32") = cls.fused_relax_nn_conv2d_relax_nn_relu(data, weight1)
            R.output(gv)
        return gv

    # Second caller of the same composite function.
    @R.function
    def main(
        data: R.Tensor((1, 64, 56, 56), dtype="float32"),
        weight1: R.Tensor((64, 64, 3, 3), dtype="float32"),
        weight2: R.Tensor((64, 64, 3, 3), dtype="float32"),
    ) -> R.Tensor((1, 64, 54, 54), dtype="float32"):
        cls = Module
        with R.dataflow():
            gv22: R.Tensor((1, 64, 56, 56), dtype="float32") = cls.fused_relax_nn_conv2d_relax_nn_relu(data, weight1)
            R.output(gv22)
        return gv22
# Print the module for inspection, then run the pass that raises the
# InternalError ("Variable gv could not be found in any group").
module = Module
module.show()
mod = relax.transform.MergeCompositeFunctions()(module)  # InternalError raised here
```
cc @Lunderberg @tqchen
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]