Mohamad11Dab opened a new issue, #17998:
URL: https://github.com/apache/tvm/issues/17998

   
   ### Expected behavior
   
   TVM and ONNXRuntime should produce the same output, within the tolerances used in the script below (rtol=1e-2, atol=1e-3).
   
   ### Actual behavior
   
   ```
   ––––– MISMATCH DETECTED –––––

   Not equal to tolerance rtol=0.01, atol=0.001

   Mismatched elements: 2 / 4096 (0.0488%)
   Max absolute difference: 0.00342751
   Max relative difference: 0.28056583
    x: array([[[ 0.046975,  0.060682, -0.14202 , ..., -0.08296 , -0.002197,
           -0.10205 ],
          [ 0.134745, -0.247786,  0.098007, ...,  0.245494,  0.23562 ,...
    y: array([[[ 0.046958,  0.060696, -0.14199 , ..., -0.08295 , -0.002193,
           -0.102041],
          [ 0.134729, -0.247741,  0.098011, ...,  0.245464,  0.235588,...
   ```
   
   ### Environment
   
   TVM: 0.17.0
   ONNXRuntime: 1.16.3
   
   ### Steps to reproduce
   
   ```python
   import random
   import sys, os
   sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
   import torch
   import torch.nn as nn
   import torch.nn.functional as F
   import tempfile
   import onnx
   import onnxruntime as ort
   from numpy.testing import assert_allclose
   import tvm
   from tvm import relay
   from tvm.contrib import graph_executor
   import numpy as np
   import nas_model_2
   
   class SimpleBugModel(nn.Module):
       def __init__(self):
           super().__init__()
           self.input_conv = torch.nn.modules.conv.Conv2d(in_channels=3, out_channels=16, kernel_size=1)
           self.block0 = torch.nn.modules.pooling.MaxPool2d(ceil_mode=True, kernel_size=2, stride=2)
           self.block1 = torch.nn.modules.instancenorm.InstanceNorm2d(num_features=16)
           self.block2 = nas_model_2.ReciprocalWrapper()
           self.block3 = torch.nn.modules.conv.Conv2d(kernel_size=5, padding=2, in_channels=16, out_channels=16)
           self.block4 = torch.nn.modules.conv.Conv2d(kernel_size=3, padding=1, in_channels=16, out_channels=16)
           self.block5 = torch.nn.modules.normalization.GroupNorm(num_groups=4, num_channels=16)
           self.block6 = torch.nn.modules.flatten.Flatten(start_dim=2)
           self.block7 = torch.nn.modules.conv.Conv1d(kernel_size=3, padding=1, in_channels=16, out_channels=16)
           self.block8 = torch.nn.modules.batchnorm.BatchNorm1d(num_features=16)
           self.block9 = torch.nn.modules.linear.Identity()
   
       def forward(self, x):
           __input_conv = self.input_conv(x)
           __blocks__0 = self.block0(__input_conv)
           __blocks__1 = self.block1(__blocks__0)
           __blocks__2 = self.block2(__blocks__1)
           __blocks__3 = self.block3(__blocks__2)
           __blocks__4 = self.block4(__blocks__3)
           __blocks__5 = self.block5(__blocks__4)
           __blocks__6 = self.block6(__blocks__5)
           __blocks__7 = self.block7(__blocks__6)
           __blocks__8 = self.block8(__blocks__7)
           __blocks__9 = self.block9(__blocks__8)
           return __blocks__9
   
   def main():
       seed=295007241
       random.seed(seed)
       np.random.seed(seed)
       torch.manual_seed(seed)
       model = SimpleBugModel()
       model.eval()
       dummy = torch.randn(1, 3, 32, 32, dtype=torch.float32)
       with tempfile.NamedTemporaryFile(suffix='.onnx', delete=False) as tmp:
           onnx_path = tmp.name
       # export the eager PyTorch model to ONNX (opset 19)
       torch.onnx.export(model, dummy, onnx_path, opset_version=19, input_names=['input'], output_names=['output'])
   
       # reference output from ONNXRuntime on CPU
       ort_sess = ort.InferenceSession(onnx_path, providers=['CPUExecutionProvider'])
       ort_out = ort_sess.run(None, {'input': dummy.numpy()})[0]
   
       onnx_model = onnx.load(onnx_path)
   
       shape_dict = {'input': dummy.numpy().shape}
       # import the ONNX graph into Relay with parameters frozen as constants
       mod, params = relay.frontend.from_onnx(onnx_model, shape_dict, freeze_params=True)
       with tvm.transform.PassContext(opt_level=4):
           lib = relay.build(mod, target='llvm', params=params)
       m = graph_executor.GraphModule(lib['default'](tvm.cpu()))
       m.set_input('input', tvm.nd.array(dummy.numpy()))
       m.run()
       tvm_out = m.get_output(0)
       tvm_out = tvm_out.numpy()
   
       try:
           assert_allclose(ort_out, tvm_out, rtol=1e-2, atol=1e-3, equal_nan=True)
       except AssertionError as e:
           print('––––– MISMATCH DETECTED –––––')
           print(e)
       except Exception as e:
           print('––––– UNEXPECTED ERROR DURING COMPARISON –––––')
           print(f'{type(e).__name__}: {e}')
   
   if __name__ == '__main__':
       main()
   ``` 
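   
   (Not part of the original report.) One way to narrow this down is to rebuild the same Relay module at `opt_level=3` and compare the two TVM outputs, since `opt_level=4` enables additional passes beyond the default level 3 (e.g. FastMath, as far as I can tell). The sketch below reuses `mod`, `params`, and `dummy` from the script above; `run_tvm` is a hypothetical helper, not something from the report:
   
   ```python
   # Sketch only (not from the original report): compare TVM outputs built at
   # opt_level=3 vs opt_level=4 to see whether the higher level introduces the gap.
   def run_tvm(mod, params, input_np, opt_level):
       with tvm.transform.PassContext(opt_level=opt_level):
           lib = relay.build(mod, target='llvm', params=params)
       m = graph_executor.GraphModule(lib['default'](tvm.cpu()))
       m.set_input('input', tvm.nd.array(input_np))
       m.run()
       return m.get_output(0).numpy()
   
   out_o3 = run_tvm(mod, params, dummy.numpy(), opt_level=3)
   out_o4 = run_tvm(mod, params, dummy.numpy(), opt_level=4)
   print('max |opt3 - opt4| =', np.abs(out_o3 - out_o4).max())
   ```
   
   If the two builds already disagree with each other, the mismatch is likely coming from level-4-only transforms rather than from the ONNX importer.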
   
   ```python
   # nas_model_2 -- helper module imported by the repro script above.
   # `basic_unit` and `nni_nn` appear to come from NNI's NAS API; their imports
   # are not shown in the report.
   
   @basic_unit
   class ReciprocalWrapper(nni_nn.Module):
       def forward(self, x):
           return torch.reciprocal(x)
   
   ``` 
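   
   (Not part of the original report.) Since the exported ONNX graph should only see the `torch.reciprocal` call, the `nas_model_2`/NNI dependency can presumably be replaced with a plain `nn.Module` drop-in so the repro is self-contained; this is an assumption on my part and was not verified against the original setup:
   
   ```python
   # Hypothetical drop-in for nas_model_2.ReciprocalWrapper (my assumption, not
   # from the original report); it should trace to the same Reciprocal op in ONNX.
   import torch
   import torch.nn as nn
   
   class ReciprocalWrapper(nn.Module):
       def forward(self, x):
           return torch.reciprocal(x)
   ```
   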
   ### Triage
   
   
   * needs-triage
   

