From 1518711b1fcba9cbaba96abfa0d395f6ec48513e Mon Sep 17 00:00:00 2001
From: logeshwaranmcw
Date: Wed, 18 Jun 2025 15:25:17 +0530
Subject: [PATCH 1/2] add support for softsign op

---
 .../torch/base_fx_graph_translator.py      |  7 +++++
 .../torch/exported_program_translator.py   |  1 +
 .../test_frontend_from_exported_program.py | 31 +++++++++++++++++++
 3 files changed, 39 insertions(+)

diff --git a/python/tvm/relax/frontend/torch/base_fx_graph_translator.py b/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
index 485b7c088a15..880fb783f475 100644
--- a/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
+++ b/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
@@ -331,6 +331,13 @@ def _softplus(self, node: fx.Node) -> relax.Var:
         beta = node.args[1] if len(node.args) > 1 else node.kwargs.get("beta", 1.0)
         threshold = node.args[2] if len(node.args) > 2 else node.kwargs.get("threshold", 20.0)
         return self.block_builder.emit(relax.op.nn.softplus(x, beta, threshold))
+
+    def _softsign(self, node: fx.Node) -> relax.Var:
+        x = self.env[node.args[0]]
+        abs_x = self.block_builder.emit(relax.op.abs(x))
+        denom = self.block_builder.emit(relax.op.add(abs_x, relax.const(1.0, dtype="float32")))
+        return self.block_builder.emit(relax.op.divide(x, denom))
+
 
     def _softshrink(self, node: fx.Node) -> relax.Var:
         """
diff --git a/python/tvm/relax/frontend/torch/exported_program_translator.py b/python/tvm/relax/frontend/torch/exported_program_translator.py
index 57a6577eaf4a..398d135ad16d 100644
--- a/python/tvm/relax/frontend/torch/exported_program_translator.py
+++ b/python/tvm/relax/frontend/torch/exported_program_translator.py
@@ -377,6 +377,7 @@ def create_convert_map(
             "softmax.int": self._softmax,
             "softplus.default": self._softplus,
             "softshrink.default": self._softshrink,
+            "softsign.default": self._softsign,
             "sqrt.default": self._unary_op(relax.op.sqrt),
             "square.default": self._unary_op(relax.op.square),
             "tan.default": self._unary_op(relax.op.tan),
diff --git a/tests/python/relax/test_frontend_from_exported_program.py b/tests/python/relax/test_frontend_from_exported_program.py
index dd04833e07b8..4f38c400ce6c 100644
--- a/tests/python/relax/test_frontend_from_exported_program.py
+++ b/tests/python/relax/test_frontend_from_exported_program.py
@@ -800,6 +800,37 @@ def main(
     verify_model(Softmax(), example_args, {}, expected1)
     verify_model(Softmax2(), example_args, {}, expected1)
 
+def test_softsign():
+    class Softsign(Module):
+        def __init__(self):
+            super().__init__()
+            self.ss = torch.nn.Softsign()
+
+        def forward(self, input):
+            return self.ss(input)
+
+    class Softsign2(Module):
+        def forward(self, input):
+            return torch.nn.functional.softsign(input)
+
+    @tvm.script.ir_module
+    class expected_softsign:
+        @R.function
+        def main(
+            input: R.Tensor((1, 3, 10, 10), dtype="float32")
+        ) -> R.Tuple(R.Tensor((1, 3, 10, 10), dtype="float32")):
+            with R.dataflow():
+                abs_val = R.abs(input)
+                denom = R.add(abs_val, R.const(1.0, "float32"))
+                result = R.divide(input, denom)
+                gv: R.Tuple(R.Tensor((1, 3, 10, 10), dtype="float32")) = (result,)
+                R.output(gv)
+            return gv
+
+    example_args = (torch.randn(1, 3, 10, 10, dtype=torch.float32),)
+    verify_model(Softsign(), example_args, {}, expected_softsign)
+    verify_model(Softsign2(), example_args, {}, expected_softsign)
+
 
 def test_softshrink():
     class Softshrink(Module):
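Note: the hunk above lowers aten::softsign as an elementwise decomposition, softsign(x) = x / (1 + |x|), emitting the 1.0 constant as float32 to match the float32 test inputs. A quick standalone sanity check of that identity against PyTorch's own implementation (illustrative only, not part of the patch):

import torch

def softsign_decomposed(x: torch.Tensor) -> torch.Tensor:
    # Mirror the Relax emission above: abs(x), add 1.0, then divide.
    return x / (x.abs() + 1.0)

x = torch.randn(1, 3, 10, 10, dtype=torch.float32)
# torch.nn.functional.softsign computes input / (1 + |input|) directly.
assert torch.allclose(softsign_decomposed(x), torch.nn.functional.softsign(x))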
From d9808aee60297503a34ca9b692ee5f6f904d6209 Mon Sep 17 00:00:00 2001
From: logeshwaranmcw
Date: Thu, 19 Jun 2025 13:49:53 +0530
Subject: [PATCH 2/2] formatted the code

---
 python/tvm/relax/frontend/torch/base_fx_graph_translator.py | 3 +--
 tests/python/relax/test_frontend_from_exported_program.py   | 1 +
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/python/tvm/relax/frontend/torch/base_fx_graph_translator.py b/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
index 880fb783f475..0026ae62a67e 100644
--- a/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
+++ b/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
@@ -331,13 +331,12 @@ def _softplus(self, node: fx.Node) -> relax.Var:
         beta = node.args[1] if len(node.args) > 1 else node.kwargs.get("beta", 1.0)
         threshold = node.args[2] if len(node.args) > 2 else node.kwargs.get("threshold", 20.0)
         return self.block_builder.emit(relax.op.nn.softplus(x, beta, threshold))
-
+
     def _softsign(self, node: fx.Node) -> relax.Var:
         x = self.env[node.args[0]]
         abs_x = self.block_builder.emit(relax.op.abs(x))
         denom = self.block_builder.emit(relax.op.add(abs_x, relax.const(1.0, dtype="float32")))
         return self.block_builder.emit(relax.op.divide(x, denom))
-
 
     def _softshrink(self, node: fx.Node) -> relax.Var:
         """
diff --git a/tests/python/relax/test_frontend_from_exported_program.py b/tests/python/relax/test_frontend_from_exported_program.py
index 4f38c400ce6c..1cf4d87af343 100644
--- a/tests/python/relax/test_frontend_from_exported_program.py
+++ b/tests/python/relax/test_frontend_from_exported_program.py
@@ -800,6 +800,7 @@ def main(
     verify_model(Softmax(), example_args, {}, expected1)
     verify_model(Softmax2(), example_args, {}, expected1)
 
+
 def test_softsign():
     class Softsign(Module):
         def __init__(self):
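Note: with both patches applied, the new mapping can be exercised end to end through torch.export. A minimal sketch, assuming a TVM build that includes the Relax PyTorch frontend with this change; the module and variable names are illustrative:

import torch
from torch.export import export
from tvm.relax.frontend.torch import from_exported_program

class SoftsignModel(torch.nn.Module):
    def forward(self, x):
        return torch.nn.functional.softsign(x)

example_args = (torch.randn(1, 3, 10, 10, dtype=torch.float32),)
exported = export(SoftsignModel(), example_args)  # torch.export.ExportedProgram
mod = from_exported_program(exported)  # Relax IRModule; softsign -> abs/add/divide
mod.show()  # prints Relax IR that should match expected_softsign above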