ntcmp2u commented on issue #15907:
URL: https://github.com/apache/tvm/issues/15907#issuecomment-1793374902
Hi @CookedMelon , I generated the model with a custom fuzzer. Below I
provide a code snippet which does not depend on external model files and
illustrates what the model looks like. I think it will help developers locate
which operator is being optimized incorrectly. Are you a developer of TVM?
Will this bug be fixed?
```python
import onnxruntime as ort
import onnx
import numpy as np
import pickle
from numpy import testing
import tvm
from tvm import relay
import torch
class Model0(torch.nn.Module):
    """Minimal repro model: tril -> self-division -> int64 cast -> row slice -> expand."""

    def __init__(self):
        super().__init__()

    def forward(self, *args):
        x = args[0]
        lower = x.tril(0)                     # zero the strict upper triangle
        ratio = torch.div(lower, lower)       # 1 where nonzero; 0/0 produces NaN
        as_int = ratio.to(dtype=torch.int64)  # NaN -> int64 is platform-defined
        row = as_int[-13:-12, :]              # keep a single row as a 2-D (1, W) tensor
        return (row.expand(1, 25),)
# Instantiate the model, export it to ONNX, and import it into TVM Relay.
model_0 = Model0()
output_names_0 = ['v4_0']

# NOTE(security): pickle.load executes arbitrary code from the file — only
# run this with a trusted './0.pickle'.
# Use a context manager so the file handle is closed (the original leaked it).
with open('./0.pickle', 'rb') as f:
    input_dict_0 = pickle.load(f)

# Only the values are used here, so iterate .values() directly.
inputs_0 = tuple(torch.from_numpy(v).to('cpu') for v in input_dict_0.values())

torch.onnx.export(model_0, inputs_0, '0.onnx', verbose=False,
                  input_names=['v5_0'], output_names=output_names_0,
                  opset_version=14, do_constant_folding=False)

onnx_model_0 = onnx.load('0.onnx')
onnx_model_outputs_0 = [node.name for node in onnx_model_0.graph.output]
# Relay's ONNX frontend needs the input shapes keyed by input name.
shape_dict_0 = {key: val.shape for key, val in input_dict_0.items()}
mod_0, params_0 = relay.frontend.from_onnx(onnx_model_0, shape_dict_0,
                                           freeze_params=True)
def func():
    """Compile the Relay module at opt_level=4, run it once on CPU.

    Returns a dict mapping the ONNX graph's output names to numpy arrays.
    """
    with tvm.transform.PassContext(opt_level=4):
        run = relay.build_module.create_executor(
            "graph", mod_0, tvm.cpu(), tvm.target.Target("llvm"), params_0
        ).evaluate()
        results = [run(**input_dict_0).numpy()]
        return dict(zip(onnx_model_outputs_0, results))
# Run the compiled module twice and check the outputs agree run-to-run.
first_run = func()
second_run = func()
print('=========================')
try:
    for name in output_names_0:
        testing.assert_allclose(first_run[name], second_run[name])
except AssertionError as e:
    print("assertion failure for inconsistency")
    print(e)
else:
    print("no problem")
print('=========================')
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]