This is an automated email from the ASF dual-hosted git repository.

syfeng pushed a commit to branch unity
in repository https://gitbox.apache.org/repos/asf/tvm.git


The following commit(s) were added to refs/heads/unity by this push:
     new 5f4412b104 [Unity][Frontend][Onnx] Add support for Elu operator (#15937)
5f4412b104 is described below

commit 5f4412b104db47e43da16aa74e393c6bd69737c2
Author: Honglin Zhu <[email protected]>
AuthorDate: Tue Oct 17 08:44:49 2023 +0800

    [Unity][Frontend][Onnx] Add support for Elu operator (#15937)
    
    Add Elu to the Relax ONNX frontend.
---
 python/tvm/relax/frontend/onnx/onnx_frontend.py | 12 ++++++++++++
 tests/python/relax/test_frontend_onnx.py        |  4 ++++
 2 files changed, 16 insertions(+)

diff --git a/python/tvm/relax/frontend/onnx/onnx_frontend.py b/python/tvm/relax/frontend/onnx/onnx_frontend.py
index f0d0c00333..61fd818849 100644
--- a/python/tvm/relax/frontend/onnx/onnx_frontend.py
+++ b/python/tvm/relax/frontend/onnx/onnx_frontend.py
@@ -1820,6 +1820,17 @@ class OneHot(OnnxOpConverter):
         return bb.emit_te(topi.one_hot, indices, on_value, off_value, depth, axis, dtype)
 
 
+class Elu(OnnxOpConverter):
+    """Converts an onnx Elu node into an equivalent Relax expression."""
+
+    @classmethod
+    def _impl_v1(cls, bb, inputs, attr, params):
+        alpha = float(attr.get("alpha", 1.0))
+        return relax.expr.const(-alpha) * relax.op.nn.relu(
+            relax.expr.const(1.0) - relax.op.exp(inputs[0])
+        ) + relax.op.nn.relu(inputs[0])
+
+
 def _get_convert_map():
     return {
         "MatMul": MatMul,
@@ -1897,6 +1908,7 @@ def _get_convert_map():
         "Greater": Greater,
         "Reciprocal": Reciprocal,
         "OneHot": OneHot,
+        "Elu": Elu,
     }
 
 
diff --git a/tests/python/relax/test_frontend_onnx.py b/tests/python/relax/test_frontend_onnx.py
index a896ebb0b9..8d400eda86 100644
--- a/tests/python/relax/test_frontend_onnx.py
+++ b/tests/python/relax/test_frontend_onnx.py
@@ -543,6 +543,10 @@ def test_triu():
     verify_unary("Trilu", [3, 5, 5], attrs={"upper": True})
 
 
+def test_elu():
+    verify_unary("Elu", [32, 32])
+
+
 def test_conv():
     def _verify_conv(input_shape, weight_shape, output_shape):
         bias_shape = [output_shape[1]]
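[Editor's note, not part of the commit] The converter above expresses Elu without a conditional: for the ONNX definition Elu(x) = x if x >= 0, alpha * (exp(x) - 1) otherwise, the patch emits -alpha * relu(1 - exp(x)) + relu(x), which agrees piecewise (for x >= 0 the first term vanishes; for x < 0 the second does). Below is a minimal NumPy sketch checking that identity numerically; the helper names elu_reference and elu_relu_form are illustrative, not from the patch.

    import numpy as np

    def elu_reference(x, alpha=1.0):
        # Piecewise ONNX definition of Elu.
        return np.where(x >= 0, x, alpha * (np.exp(x) - 1.0))

    def elu_relu_form(x, alpha=1.0):
        # Relu/exp rewrite emitted by the converter in this patch.
        relu = lambda v: np.maximum(v, 0.0)
        return -alpha * relu(1.0 - np.exp(x)) + relu(x)

    x = np.linspace(-5.0, 5.0, 101)
    assert np.allclose(elu_reference(x, alpha=2.0), elu_relu_form(x, alpha=2.0))

Presumably the relu-based form avoids emitting a select/where op, but the commit message does not state the motivation; that reading is an inference.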
