diff --git a/python/tvm/relax/frontend/torch/base_fx_graph_translator.py b/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
index 485b7c088a15..0026ae62a67e 100644
--- a/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
+++ b/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
@@ -332,6 +332,14 @@ def _softplus(self, node: fx.Node) -> relax.Var:
         threshold = node.args[2] if len(node.args) > 2 else node.kwargs.get("threshold", 20.0)
         return self.block_builder.emit(relax.op.nn.softplus(x, beta, threshold))
 
+    def _softsign(self, node: fx.Node) -> relax.Var:
+        """Applies the Softsign activation in Relax: softsign(x) = x / (1 + |x|)."""
+        x = self.env[node.args[0]]
+        dtype = x.struct_info.dtype  # follow the input dtype rather than hard-coding float32
+        abs_x = self.block_builder.emit(relax.op.abs(x))
+        denom = self.block_builder.emit(relax.op.add(abs_x, relax.const(1.0, dtype)))
+        return self.block_builder.emit(relax.op.divide(x, denom))
+
     def _softshrink(self, node: fx.Node) -> relax.Var:
         """
         Applies the Softshrink activation function in Relax.
diff --git a/python/tvm/relax/frontend/torch/exported_program_translator.py b/python/tvm/relax/frontend/torch/exported_program_translator.py
index 57a6577eaf4a..398d135ad16d 100644
--- a/python/tvm/relax/frontend/torch/exported_program_translator.py
+++ b/python/tvm/relax/frontend/torch/exported_program_translator.py
@@ -377,6 +377,7 @@ def create_convert_map(
             "softmax.int": self._softmax,
             "softplus.default": self._softplus,
             "softshrink.default": self._softshrink,
+            "softsign.default": self._softsign,
             "sqrt.default": self._unary_op(relax.op.sqrt),
             "square.default": self._unary_op(relax.op.square),
             "tan.default": self._unary_op(relax.op.tan),
diff --git a/tests/python/relax/test_frontend_from_exported_program.py b/tests/python/relax/test_frontend_from_exported_program.py
index dd04833e07b8..1cf4d87af343 100644
--- a/tests/python/relax/test_frontend_from_exported_program.py
+++ b/tests/python/relax/test_frontend_from_exported_program.py
@@ -801,6 +801,38 @@ def main(
     verify_model(Softmax2(), example_args, {}, expected1)
 
 
+def test_softsign():
+    class Softsign(Module):
+        def __init__(self):
+            super().__init__()
+            self.ss = torch.nn.Softsign()
+
+        def forward(self, input):
+            return self.ss(input)
+
+    class Softsign2(Module):
+        def forward(self, input):
+            return torch.nn.functional.softsign(input)
+
+    @tvm.script.ir_module
+    class expected_softsign:
+        @R.function
+        def main(
+            input: R.Tensor((1, 3, 10, 10), dtype="float32")
+        ) -> R.Tuple(R.Tensor((1, 3, 10, 10), dtype="float32")):
+            with R.dataflow():
+                abs_val = R.abs(input)
+                denom = R.add(abs_val, R.const(1.0, "float32"))
+                result = R.divide(input, denom)
+                gv: R.Tuple(R.Tensor((1, 3, 10, 10), dtype="float32")) = (result,)
+                R.output(gv)
+            return gv
+
+    example_args = (torch.randn(1, 3, 10, 10, dtype=torch.float32),)
+    verify_model(Softsign(), example_args, {}, expected_softsign)
+    verify_model(Softsign2(), example_args, {}, expected_softsign)
+
+
 def test_softshrink():
     class Softshrink(Module):
         def __init__(self):
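
Usage sketch (not part of the patch): a minimal end-to-end check of the new "softsign.default" mapping, assuming a TVM build that includes this change; SoftsignModel is a hypothetical example module, not something defined in the patch.

    import torch
    from tvm.relax.frontend.torch import from_exported_program

    # Hypothetical example module; any graph containing softsign will do.
    class SoftsignModel(torch.nn.Module):
        def forward(self, x):
            return torch.nn.functional.softsign(x)

    example_args = (torch.randn(1, 3, 10, 10, dtype=torch.float32),)
    exported = torch.export.export(SoftsignModel(), example_args)
    mod = from_exported_program(exported)
    mod.show()  # the lowered body should contain R.abs, R.add, and R.divide

Since there is no dedicated softsign op in Relax, the converter decomposes it into abs/add/divide, which is why the expected TVMScript in the test spells out those three ops rather than a single call.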