This is an automated email from the ASF dual-hosted git repository.

tkonolige pushed a commit to branch tkonolige/relax_pad_etc_new
in repository https://gitbox.apache.org/repos/asf/tvm.git

commit 82e6162f23c2a406d2e371d739038bb3df137b2a
Author: Tristan Konolige <[email protected]>
AuthorDate: Wed May 17 20:19:08 2023 +0000

    bad rebase
---
 python/tvm/relax/frontend/torch/fx_translator.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/python/tvm/relax/frontend/torch/fx_translator.py b/python/tvm/relax/frontend/torch/fx_translator.py
index 6222b11432..fcbe3574e1 100644
--- a/python/tvm/relax/frontend/torch/fx_translator.py
+++ b/python/tvm/relax/frontend/torch/fx_translator.py
@@ -1222,7 +1222,6 @@ class TorchFXImporter:
             )
         )
 
-<<<<<<< HEAD
     def _scaled_dot_product_attention(self, node: fx.node.Node) -> relax.Var:
         assert len(node.args) <= 4, "Dropout, and causal masking are not supported."
         transpose_S_H = lambda tensor: relax.op.permute_dims(tensor, [0, 2, 1, 3])
@@ -1240,8 +1239,6 @@ class TorchFXImporter:
 
         return self.block_builder.emit(attn)
 
-||||||| parent of 97e234777 (Add einsum, gelu, pad support to relax)
-=======
     def _pad_common(self, mode, pad_value, inputs):
         data = self.env[inputs[0]]
         pad_list = inputs[1]

Reply via email to