This is an automated email from the ASF dual-hosted git repository.
syfeng pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tvm.git
The following commit(s) were added to refs/heads/main by this push:
new 6ff3efab70 add support for softsign op (#18075)
6ff3efab70 is described below
commit 6ff3efab702681b45d6ac03dec418fbc116e7577
Author: Logeshwaran <[email protected]>
AuthorDate: Mon Jun 23 08:40:01 2025 +0530
add support for softsign op (#18075)
* add support for softsign op
* formatted the code
---
.../frontend/torch/base_fx_graph_translator.py | 6 ++++
.../frontend/torch/exported_program_translator.py | 1 +
.../relax/test_frontend_from_exported_program.py | 32 ++++++++++++++++++++++
3 files changed, 39 insertions(+)
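Note: softsign(x) = x / (1 + |x|); the new _softsign handler lowers it to relax.op.abs, relax.op.add, and relax.op.divide, as the first hunk below shows. For illustration only (not part of the patch), here is a minimal sketch of exercising the new mapping through the exported-program frontend; the module name, input shape, and variable names are assumptions for the example:

    import torch
    from tvm.relax.frontend.torch import from_exported_program

    class SoftsignModel(torch.nn.Module):
        def forward(self, x):
            # softsign(x) = x / (1 + |x|)
            return torch.nn.functional.softsign(x)

    # Export the PyTorch module, then translate it to a Relax IRModule.
    # With this commit, the "softsign.default" entry routes the call to
    # _softsign, which emits abs, add, and divide ops.
    exported = torch.export.export(SoftsignModel(), (torch.randn(1, 3, 10, 10),))
    mod = from_exported_program(exported)
    mod.show()  # print the translated Relax module

On a build that includes this commit, the printed main function should match the expected_softsign module in the test added below.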
diff --git a/python/tvm/relax/frontend/torch/base_fx_graph_translator.py b/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
index 485b7c088a..0026ae62a6 100644
--- a/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
+++ b/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
@@ -332,6 +332,12 @@ class BaseFXGraphImporter(metaclass=abc.ABCMeta):
         threshold = node.args[2] if len(node.args) > 2 else node.kwargs.get("threshold", 20.0)
         return self.block_builder.emit(relax.op.nn.softplus(x, beta, threshold))
 
+    def _softsign(self, node: fx.Node) -> relax.Var:
+        x = self.env[node.args[0]]
+        abs_x = self.block_builder.emit(relax.op.abs(x))
+        denom = self.block_builder.emit(relax.op.add(abs_x, relax.const(1.0, dtype="float32")))
+        return self.block_builder.emit(relax.op.divide(x, denom))
+
     def _softshrink(self, node: fx.Node) -> relax.Var:
         """
         Applies the Softshrink activation function in Relax.
diff --git a/python/tvm/relax/frontend/torch/exported_program_translator.py b/python/tvm/relax/frontend/torch/exported_program_translator.py
index 57a6577eaf..398d135ad1 100644
--- a/python/tvm/relax/frontend/torch/exported_program_translator.py
+++ b/python/tvm/relax/frontend/torch/exported_program_translator.py
@@ -377,6 +377,7 @@ class ExportedProgramImporter(BaseFXGraphImporter):
"softmax.int": self._softmax,
"softplus.default": self._softplus,
"softshrink.default": self._softshrink,
+ "softsign.default": self._softsign,
"sqrt.default": self._unary_op(relax.op.sqrt),
"square.default": self._unary_op(relax.op.square),
"tan.default": self._unary_op(relax.op.tan),
diff --git a/tests/python/relax/test_frontend_from_exported_program.py b/tests/python/relax/test_frontend_from_exported_program.py
index dd04833e07..1cf4d87af3 100644
--- a/tests/python/relax/test_frontend_from_exported_program.py
+++ b/tests/python/relax/test_frontend_from_exported_program.py
@@ -801,6 +801,38 @@ def test_softmax():
     verify_model(Softmax2(), example_args, {}, expected1)
 
 
+def test_softsign():
+    class Softsign(Module):
+        def __init__(self):
+            super().__init__()
+            self.ss = torch.nn.Softsign()
+
+        def forward(self, input):
+            return self.ss(input)
+
+    class Softsign2(Module):
+        def forward(self, input):
+            return torch.nn.functional.softsign(input)
+
+    @tvm.script.ir_module
+    class expected_softsign:
+        @R.function
+        def main(
+            input: R.Tensor((1, 3, 10, 10), dtype="float32")
+        ) -> R.Tuple(R.Tensor((1, 3, 10, 10), dtype="float32")):
+            with R.dataflow():
+                abs_val = R.abs(input)
+                denom = R.add(abs_val, R.const(1.0, "float32"))
+                result = R.divide(input, denom)
+                gv: R.Tuple(R.Tensor((1, 3, 10, 10), dtype="float32")) = (result,)
+                R.output(gv)
+            return gv
+
+    example_args = (torch.randn(1, 3, 10, 10, dtype=torch.float32),)
+    verify_model(Softsign(), example_args, {}, expected_softsign)
+    verify_model(Softsign2(), example_args, {}, expected_softsign)
+
+
 def test_softshrink():
     class Softshrink(Module):
         def __init__(self):