From f55061018e9a57f9e56779f17245359460607cdd Mon Sep 17 00:00:00 2001
From: Qingchao Shen
Date: Mon, 17 Jul 2023 16:38:26 +0800
Subject: [PATCH 1/3] Fix softmax converter about keras

---
 python/tvm/relay/frontend/keras.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/python/tvm/relay/frontend/keras.py b/python/tvm/relay/frontend/keras.py
index 1913d4a2681a..345c0622e122 100644
--- a/python/tvm/relay/frontend/keras.py
+++ b/python/tvm/relay/frontend/keras.py
@@ -131,14 +131,14 @@ def _convert_advanced_activation(inexpr, keras_layer, etab, data_layout, input_s
 
     if act_type == "Softmax":
         axis = keras_layer.axis
-        dims = len(input_shape)
+        dims = len(input_shape) if input_shape else 0
         if isinstance(axis, list):
             raise tvm.error.OpAttributeUnImplemented(f"Softmax with axes {axis} is not supported.")
         if data_layout == "NCHW":
-            if axis == -1:
+            if input_shape and axis == -1:
                 axis = 1
             else:
-                axis = axis + 1 if axis < dims - 1 else 1
+                axis = axis + 1 if axis <= dims - 1 else 1
         return _op.nn.softmax(inexpr, axis=axis)
     if act_type == "ReLU":
         if np.isnan(keras_layer.threshold).any():

From 0617f3718bbbbdc2afdffc571e8b3261ea7d2791 Mon Sep 17 00:00:00 2001
From: Qingchao Shen
Date: Mon, 17 Jul 2023 16:45:38 +0800
Subject: [PATCH 2/3] add new test cases to capture the bug

---
 tests/python/frontend/keras/test_forward.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/tests/python/frontend/keras/test_forward.py b/tests/python/frontend/keras/test_forward.py
index 50a0e9850559..53e2ca8dbe23 100644
--- a/tests/python/frontend/keras/test_forward.py
+++ b/tests/python/frontend/keras/test_forward.py
@@ -229,6 +229,13 @@ def test_forward_activations(self, keras_mod):
         keras_model = keras_mod.models.Model(data, x)
         verify_keras_frontend(keras_model)
         verify_keras_frontend(keras_model, need_transpose=False, layout="NHWC")
+        # Test the input dimension = 1
+        data = keras_mod.layers.Input(shape=(11,))
+        act_func = keras_mod.layers.Softmax()
+        x = act_func(data)
+        keras_model = keras_mod.models.Model(data, x)
+        verify_keras_frontend(keras_model)
+        verify_keras_frontend(keras_model, need_transpose=False, layout="NHWC")
 
     def test_forward_activations_except(self, keras_mod):
         """

From ce2780725b6cb2ef2296bb23e7a0ede7ede80e9e Mon Sep 17 00:00:00 2001
From: Qingchao Shen
Date: Mon, 17 Jul 2023 20:00:57 +0800
Subject: [PATCH 3/3] Update keras.py

---
 python/tvm/relay/frontend/keras.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/python/tvm/relay/frontend/keras.py b/python/tvm/relay/frontend/keras.py
index 345c0622e122..aba4160695fe 100644
--- a/python/tvm/relay/frontend/keras.py
+++ b/python/tvm/relay/frontend/keras.py
@@ -135,10 +135,12 @@ def _convert_advanced_activation(inexpr, keras_layer, etab, data_layout, input_s
         if isinstance(axis, list):
             raise tvm.error.OpAttributeUnImplemented(f"Softmax with axes {axis} is not supported.")
         if data_layout == "NCHW":
-            if input_shape and axis == -1:
+            if dims == 0:
+                axis = 0
+            elif axis == -1:
                 axis = 1
             else:
-                axis = axis + 1 if axis <= dims - 1 else 1
+                axis = axis + 1 if axis < dims - 1 else 1
         return _op.nn.softmax(inexpr, axis=axis)
     if act_type == "ReLU":
         if np.isnan(keras_layer.threshold).any():
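
For context, the scenario exercised by the new test in PATCH 2/3 can also be reproduced outside the test suite. The sketch below is illustrative and not part of the patches: it assumes tf.keras and a TVM build that includes this fix, and the input name "in1" and batch size 1 are made-up values.

    # Minimal sketch (not part of the patches) of the case the new test covers:
    # a 1-D (plus batch) input feeding a Softmax advanced-activation layer.
    # Assumes tf.keras and a TVM build containing this fix; the input name
    # "in1" and batch size 1 are illustrative choices.
    from tensorflow import keras
    from tvm import relay

    data = keras.layers.Input(shape=(11,), name="in1")  # single non-batch dimension
    out = keras.layers.Softmax()(data)                   # axis defaults to -1
    model = keras.models.Model(data, out)

    # Convert with the default NCHW layout, mirroring the default path of
    # verify_keras_frontend; the shape dict maps the input name to a concrete shape.
    mod, params = relay.frontend.from_keras(model, {"in1": (1, 11)}, layout="NCHW")
    print(mod)

Before the fix, this kind of input (no spatial dimensions) could trip the converter's softmax axis computation; with the patches applied the conversion produces a softmax over the feature axis.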