From 24e6e9af572fe6f25afae983529e22f82532ee0a Mon Sep 17 00:00:00 2001 From: zha0q1 Date: Tue, 15 Dec 2020 21:04:05 +0000 Subject: [PATCH 1/7] rewrite slice_axis + add test cases for slice_axis and reshape --- .../contrib/onnx/mx2onnx/_op_translations.py | 39 +++++++++++-------- tests/python-pytest/onnx/test_operators.py | 27 +++++++++++-- 2 files changed, 47 insertions(+), 19 deletions(-) diff --git a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py index 4b7c3a5e2e81..f4c50baab8ea 100644 --- a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py @@ -1645,30 +1645,37 @@ def convert_cast(node, **kwargs): @mx_op.register("slice_axis") def convert_slice_axis(node, **kwargs): + from onnx.helper import make_node """Map MXNet's slice_axis operator attributes to onnx's Slice operator and return the created node. """ name, input_nodes, attrs = get_inputs(node, kwargs) - axes = int(attrs.get("axis")) - starts = int(attrs.get("begin")) - ends = attrs.get("end", None) - if not ends or ends == 'None': + axis = int(attrs.get("axis")) + begin = int(attrs.get("begin")) + end = attrs.get("end", None) + + nodes = [] + create_tensor([axis], name+'_axis',kwargs["initializer"]) + create_tensor([begin], name+'_begin',kwargs["initializer"]) + if not end or end == 'None': # ONNX doesn't support None for ends. Since ends=None depicts # length of dimension, passing dimension in this case. 
- in_shape = kwargs['in_shape'][0] - ends = in_shape[axes] + create_tensor([axis+1], name+"_axis_plus_1", kwargs["initializer"]) + nodes += [ + make_node('Shape', [input_nodes[0]], [name+"_data_shape"]), + make_node('Slice', [name+'_data_shape', name+'_axis', name+'_axis_plus_1'], + [name+"_end"]), + ] + else: + create_tensor([int(end)], name+'_end',kwargs["initializer"]) - node = onnx.helper.make_node( - "Slice", - input_nodes, - [name], - axes=[axes], - starts=[starts], - ends=[int(ends)], - name=name, - ) - return [node] + nodes += [ + make_node('Slice', [input_nodes[0], name+'_begin', name+'_end', name+'_axis'], + [name], name=name) + ] + + return nodes @mx_op.register("SliceChannel") diff --git a/tests/python-pytest/onnx/test_operators.py b/tests/python-pytest/onnx/test_operators.py index 169fc2ea4c99..0640680f39ad 100644 --- a/tests/python-pytest/onnx/test_operators.py +++ b/tests/python-pytest/onnx/test_operators.py @@ -133,11 +133,32 @@ def test_onnx_export_SequenceMask(tmp_path, dtype): op_export_test('SequenceMask_2', M2, [x, seq_len2], tmp_path) -@pytest.mark.parametrize('dtype', ['float32', 'float64', 'int32']) +@pytest.mark.parametrize('dtype', ['float32']) def test_onnx_export_contrib_interleaved_matmul_selfatt_qk(tmp_path, dtype): M1 = def_model('contrib.interleaved_matmul_selfatt_qk', heads=3) - x1 = mx.nd.random.uniform(0, 1, (3, 3, 3*3*3)) + x1 = mx.nd.random.uniform(0, 1, (3, 3, 3*3*3), dtype=dtype) op_export_test('contrib_interleaved_matmul_selfatt_qk_1', M1, [x1], tmp_path) M2 = def_model('contrib.interleaved_matmul_selfatt_qk', heads=5) - x2 = mx.nd.random.uniform(0, 1, (7, 5, 4*5*6)) + x2 = mx.nd.random.uniform(0, 1, (7, 5, 4*5*6), dtype=dtype) op_export_test('contrib_interleaved_matmul_selfatt_qk_2', M2, [x2], tmp_path) + + +@pytest.mark.parametrize('dtype', ['float32', 'float64', 'int32']) +def test_onnx_export_slice_axis(tmp_path, dtype): + x = mx.nd.array([[ 1., 2., 3., 4.], + [ 5., 6., 7., 8.], + [ 9., 10., 11., 12.]], dtype=dtype) + M1 
= def_model('slice_axis', axis=0, begin=1, end=3) + M2 = def_model('slice_axis', axis=0, begin=1, end=None) + M3 = def_model('slice_axis', axis=1, begin=-3, end=-1) + op_export_test('slice_axis_1', M1, [x], tmp_path) + op_export_test('slice_axis_2', M2, [x], tmp_path) + op_export_test('slice_axis_3', M3, [x], tmp_path) + +@pytest.mark.parametrize('dtype', ['float32', 'float64', 'int32']) +def test_onnx_export_reshape(tmp_path, dtype): + x = mx.nd.ones((2, 3, 4, 5, 6), dtype=dtype) + M1 = def_model('reshape', shape=(2, 1, 1, -1, 0, 1, 0), reverse=True) + op_export_test('reshape_1', M1, [x], tmp_path) + M2 = def_model('reshape', shape=(6, 1, 0, -1)) + op_export_test('reshape_2', M2, [x], tmp_path) From 2e723499044aee1afcb926cd2fcc5c37e284dd80 Mon Sep 17 00:00:00 2001 From: zha0q1 Date: Tue, 15 Dec 2020 21:46:25 +0000 Subject: [PATCH 2/7] fix sanity --- .../mxnet/contrib/onnx/mx2onnx/_op_translations.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py index f4c50baab8ea..e2b01979682f 100644 --- a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py @@ -1645,10 +1645,10 @@ def convert_cast(node, **kwargs): @mx_op.register("slice_axis") def convert_slice_axis(node, **kwargs): - from onnx.helper import make_node """Map MXNet's slice_axis operator attributes to onnx's Slice operator and return the created node. 
""" + from onnx.helper import make_node name, input_nodes, attrs = get_inputs(node, kwargs) axis = int(attrs.get("axis")) @@ -1656,8 +1656,8 @@ def convert_slice_axis(node, **kwargs): end = attrs.get("end", None) nodes = [] - create_tensor([axis], name+'_axis',kwargs["initializer"]) - create_tensor([begin], name+'_begin',kwargs["initializer"]) + create_tensor([axis], name+'_axis', kwargs["initializer"]) + create_tensor([begin], name+'_begin', kwargs["initializer"]) if not end or end == 'None': # ONNX doesn't support None for ends. Since ends=None depicts # length of dimension, passing dimension in this case. @@ -1665,14 +1665,14 @@ def convert_slice_axis(node, **kwargs): nodes += [ make_node('Shape', [input_nodes[0]], [name+"_data_shape"]), make_node('Slice', [name+'_data_shape', name+'_axis', name+'_axis_plus_1'], - [name+"_end"]), + [name+"_end"]) ] else: - create_tensor([int(end)], name+'_end',kwargs["initializer"]) + create_tensor([int(end)], name+'_end', kwargs["initializer"]) nodes += [ make_node('Slice', [input_nodes[0], name+'_begin', name+'_end', name+'_axis'], - [name], name=name) + [name], name=name) ] return nodes From 62507a6dcda60b333760ed170e69b5eef6ff1ded Mon Sep 17 00:00:00 2001 From: zha0q1 Date: Tue, 15 Dec 2020 23:54:55 +0000 Subject: [PATCH 3/7] fix embedding --- .../mxnet/contrib/onnx/mx2onnx/_op_translations.py | 2 +- tests/python-pytest/onnx/test_operators.py | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py index e2b01979682f..5b7f76d4acc9 100644 --- a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py @@ -2491,7 +2491,7 @@ def convert_embedding(node, **kwargs): axis = int(attrs.get('axis', 0)) node = onnx.helper.make_node( "Gather", - input_nodes, + [input_nodes[1], input_nodes[0]], [name], axis=axis, name=name diff --git 
a/tests/python-pytest/onnx/test_operators.py b/tests/python-pytest/onnx/test_operators.py index 0640680f39ad..17c7f127be1c 100644 --- a/tests/python-pytest/onnx/test_operators.py +++ b/tests/python-pytest/onnx/test_operators.py @@ -60,6 +60,7 @@ def onnx_rt(onnx_file, inputs): model.initialize(ctx=mx.cpu(0)) model.hybridize() pred_nat = model(*inputs) + print(pred_nat) onnx_file = export_to_onnx(model, model_name, inputs) pred_onx = onnx_rt(onnx_file, inputs) assert_almost_equal(pred_nat, pred_onx) @@ -155,6 +156,7 @@ def test_onnx_export_slice_axis(tmp_path, dtype): op_export_test('slice_axis_2', M2, [x], tmp_path) op_export_test('slice_axis_3', M3, [x], tmp_path) + @pytest.mark.parametrize('dtype', ['float32', 'float64', 'int32']) def test_onnx_export_reshape(tmp_path, dtype): x = mx.nd.ones((2, 3, 4, 5, 6), dtype=dtype) @@ -162,3 +164,15 @@ def test_onnx_export_reshape(tmp_path, dtype): op_export_test('reshape_1', M1, [x], tmp_path) M2 = def_model('reshape', shape=(6, 1, 0, -1)) op_export_test('reshape_2', M2, [x], tmp_path) + +@pytest.mark.parametrize('dtype', ['int32', 'int64']) +def test_onnx_export_embedding(tmp_path, dtype): + x = mx.nd.array([[ 1., 3.], + [ 0., 2.]], dtype=dtype) + y = mx.nd.array([[ 0., 1., 2., 3., 4.], + [ 5., 6., 7., 8., 9.], + [ 10., 11., 12., 13., 14.], + [ 15., 16., 17., 18., 19.]], dtype=dtype) + M = def_model('Embedding', input_dim=4, output_dim=5) + op_export_test('Embedding', M, [x, y], tmp_path) + From 1a442d467f4e8e8cd3ff69934e75238be381dc40 Mon Sep 17 00:00:00 2001 From: zha0q1 Date: Wed, 16 Dec 2020 01:31:03 +0000 Subject: [PATCH 4/7] revert reverse support for reshape because it was wrong --- python/mxnet/contrib/onnx/mx2onnx/_op_translations.py | 8 +------- tests/python-pytest/onnx/test_operators.py | 8 +++----- 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py index 5b7f76d4acc9..3d0100917a0d 100644 
--- a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py @@ -1572,14 +1572,8 @@ def convert_reshape(node, **kwargs): reverse = attrs.get('reverse', 'False') output_shape_list = convert_string_to_list(attrs["shape"]) - data_shape = list(kwargs['in_shape'][0]) if reverse == 'True': - output_shape_list.reverse() - data_shape.reverse() - for i, dim in enumerate(output_shape_list): - if dim == 0: - output_shape_list[i] = data_shape[i] - output_shape_list.reverse() + raise NotImplementedError("the reverse option in Reshape is not supported yet.") initializer = kwargs["initializer"] output_shape_np = np.array(output_shape_list, dtype='int64') diff --git a/tests/python-pytest/onnx/test_operators.py b/tests/python-pytest/onnx/test_operators.py index 17c7f127be1c..eaa02f1451d1 100644 --- a/tests/python-pytest/onnx/test_operators.py +++ b/tests/python-pytest/onnx/test_operators.py @@ -60,7 +60,6 @@ def onnx_rt(onnx_file, inputs): model.initialize(ctx=mx.cpu(0)) model.hybridize() pred_nat = model(*inputs) - print(pred_nat) onnx_file = export_to_onnx(model, model_name, inputs) pred_onx = onnx_rt(onnx_file, inputs) assert_almost_equal(pred_nat, pred_onx) @@ -160,10 +159,9 @@ def test_onnx_export_slice_axis(tmp_path, dtype): @pytest.mark.parametrize('dtype', ['float32', 'float64', 'int32']) def test_onnx_export_reshape(tmp_path, dtype): x = mx.nd.ones((2, 3, 4, 5, 6), dtype=dtype) - M1 = def_model('reshape', shape=(2, 1, 1, -1, 0, 1, 0), reverse=True) - op_export_test('reshape_1', M1, [x], tmp_path) - M2 = def_model('reshape', shape=(6, 1, 0, -1)) - op_export_test('reshape_2', M2, [x], tmp_path) + M = def_model('reshape', shape=(6, 1, 0, -1)) + op_export_test('reshape', M, [x], tmp_path) + @pytest.mark.parametrize('dtype', ['int32', 'int64']) def test_onnx_export_embedding(tmp_path, dtype): From 6f868445c8dcfdef5193c54c124b45fa645d7eea Mon Sep 17 00:00:00 2001 From: zha0q1 Date: Wed, 16 Dec 2020 22:48:54 +0000 
Subject: [PATCH 5/7] fix reshape and add test cases --- .../contrib/onnx/mx2onnx/_op_translations.py | 74 +++++++++++-------- tests/python-pytest/onnx/test_operators.py | 10 ++- 2 files changed, 49 insertions(+), 35 deletions(-) diff --git a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py index 3d0100917a0d..ccb6560e822f 100644 --- a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py @@ -1568,47 +1568,57 @@ def convert_reshape(node, **kwargs): Converts output shape attribute to output shape tensor and return multiple created nodes. """ + from onnx.helper import make_node + name, input_nodes, attrs = get_inputs(node, kwargs) reverse = attrs.get('reverse', 'False') - output_shape_list = convert_string_to_list(attrs["shape"]) - if reverse == 'True': - raise NotImplementedError("the reverse option in Reshape is not supported yet.") - - initializer = kwargs["initializer"] - output_shape_np = np.array(output_shape_list, dtype='int64') - data_type = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[output_shape_np.dtype] - dims = np.shape(output_shape_np) - - output_shape_name = "reshape_attr_tensor" + str(kwargs["idx"]) - tensor_node = onnx.helper.make_tensor_value_info(output_shape_name, data_type, dims) - - initializer.append( - onnx.helper.make_tensor( - name=output_shape_name, - data_type=data_type, - dims=dims, - vals=output_shape_list, - raw=False, - ) - ) - - input_nodes.append(output_shape_name) + targ_shape = convert_string_to_list(attrs["shape"]) not_supported_shape = [-2, -3, -4] - - for val in output_shape_list: + for val in targ_shape: if val in not_supported_shape: raise AttributeError("Reshape: Shape value not supported in ONNX", val) - reshape_node = onnx.helper.make_node( - "Reshape", - input_nodes, - [name], - name=name - ) + create_tensor(targ_shape, name+'_targ_shape', kwargs['initializer']) - return [tensor_node, reshape_node] + nodes = [] + 
if reverse == 'False': + nodes += [ + make_node('Reshape', [input_nodes[0], name+'_targ_shape'], [name], name=name) + ] + else: + create_tensor([0], name+'_0', kwargs['initializer']) + create_tensor([1], name+'_1', kwargs['initializer']) + nodes += [ + make_node('Shape', [name+'_targ_shape'], [name+'_targ_dim']), + make_node('Shape', [input_nodes[0]], [name+'_orig_shape']), + make_node('Shape', [name+'_orig_shape'], [name+'_orig_dim']), + make_node('Sub', [name+'_targ_dim', name+'_orig_dim'], [name+'_dim_diff']), + make_node('Abs', [name+'_dim_diff'], [name+'_pad_len']), + make_node('Less', [name+'_targ_dim', name+'_orig_dim'], [name+'_targ_less_orig']), + make_node('Less', [name+'_orig_dim', name+'_targ_dim'], [name+'_orig_less_targ']), + make_node('Where', [name+'_targ_less_orig', name+'_pad_len', name+'_0'], + [name+'_targ_pad_len']), + make_node('Where', [name+'_orig_less_targ', name+'_pad_len', name+'_0'], + [name+'_orig_pad_len']), + make_node('Concat', [name+'_targ_pad_len', name+'_0'], [name+'_targ_pads'], axis=0), + make_node('Concat', [name+'_orig_pad_len', name+'_0'], [name+'_orig_pads'], axis=0), + make_node('Pad', [name+'_targ_shape', name+'_targ_pads', name+'_1'], + [name+'_targ_shape_padded'], mode='constant'), + make_node('Pad', [name+'_orig_shape', name+'_orig_pads', name+'_1'], + [name+'_orig_shape_padded'], mode='constant'), + make_node('Equal', [name+'_targ_shape_padded', name+'_0'], + [name+'_targ_shape_0_mask']), + make_node('Where', [name+'_targ_shape_0_mask', name+'_orig_shape_padded', + name+'_targ_shape_padded'], [name+'_targ_shape_new']), + make_node('Shape', [name+'_targ_shape_new'], [name+'_targ_new_dim']), + make_node('Slice', [name+'_targ_shape_new', name+'_targ_pad_len', + name+'_targ_new_dim'], [name+'_targ_shape_final']), + make_node('Reshape', [input_nodes[0], name+'_targ_shape_final'], [name], name=name) + ] + + return nodes @mx_op.register("Cast") def convert_cast(node, **kwargs): diff --git 
a/tests/python-pytest/onnx/test_operators.py b/tests/python-pytest/onnx/test_operators.py index eaa02f1451d1..27b8b2c19727 100644 --- a/tests/python-pytest/onnx/test_operators.py +++ b/tests/python-pytest/onnx/test_operators.py @@ -156,11 +156,15 @@ def test_onnx_export_slice_axis(tmp_path, dtype): op_export_test('slice_axis_3', M3, [x], tmp_path) -@pytest.mark.parametrize('dtype', ['float32', 'float64', 'int32']) +@pytest.mark.parametrize('dtype', ['float32', 'float64', 'int32', 'int64']) def test_onnx_export_reshape(tmp_path, dtype): x = mx.nd.ones((2, 3, 4, 5, 6), dtype=dtype) - M = def_model('reshape', shape=(6, 1, 0, -1)) - op_export_test('reshape', M, [x], tmp_path) + M1 = def_model('reshape', shape=(6, 1, 0, -1)) + op_export_test('reshape_1', M1, [x], tmp_path) + M2 = def_model('reshape', shape=(3, -1, 0, 0), reverse=True) + op_export_test('reshape_2', M2, [x], tmp_path) + M3 = def_model('reshape', shape=(5, 1, 1, 1, 1, 0, -1, 0), reverse=True) + op_export_test('reshape_3', M3, [x], tmp_path) @pytest.mark.parametrize('dtype', ['int32', 'int64']) From 5fce3f02eaa770e512a94ac2ffc927c2dfb35de3 Mon Sep 17 00:00:00 2001 From: zha0q1 Date: Thu, 17 Dec 2020 05:32:08 +0000 Subject: [PATCH 6/7] fix --- .../contrib/onnx/mx2onnx/_op_translations.py | 61 +++++++++---------- 1 file changed, 30 insertions(+), 31 deletions(-) diff --git a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py index 862b85e4d3ab..d690419c3bb4 100644 --- a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py @@ -181,6 +181,7 @@ def create_tensor(shape_list, shape_name, initializer, dtype='int64'): shape_np = np.array(shape_list, dtype=dtype) data_type = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[shape_np.dtype] dims = np.shape(shape_np) + tensor_node = onnx.helper.make_tensor_value_info(shape_name, data_type, dims) initializer.append( onnx.helper.make_tensor( name=shape_name, @@ -190,6
+191,7 @@ def create_tensor(shape_list, shape_name, initializer, dtype='int64'): raw=False ) ) + return tensor_node @mx_op.register("null") def convert_weights_and_inputs(node, **kwargs): @@ -1555,17 +1557,18 @@ def convert_reshape(node, **kwargs): if val in not_supported_shape: raise AttributeError("Reshape: Shape value not supported in ONNX", val) - create_tensor(targ_shape, name+'_targ_shape', kwargs['initializer']) + nodes = [ + create_tensor(targ_shape, name+'_targ_shape', kwargs['initializer']) + ] - nodes = [] if reverse == 'False': nodes += [ make_node('Reshape', [input_nodes[0], name+'_targ_shape'], [name], name=name) ] else: - create_tensor([0], name+'_0', kwargs['initializer']) - create_tensor([1], name+'_1', kwargs['initializer']) nodes += [ + create_tensor([0], name+'_0', kwargs['initializer']), + create_tensor([1], name+'_1', kwargs['initializer']), make_node('Shape', [name+'_targ_shape'], [name+'_targ_dim']), make_node('Shape', [input_nodes[0]], [name+'_orig_shape']), make_node('Shape', [name+'_orig_shape'], [name+'_orig_dim']), @@ -2300,15 +2303,14 @@ def convert_matmul_selfatt_qk(node, **kwargs): heads = int(attrs.get('heads')) # a, b, c, d, e are seq_len, batch_size, num_heads, 3, head_dim respectively - create_tensor([0], name+"_0", kwargs["initializer"]) - create_tensor([1], name+"_1", kwargs["initializer"]) - create_tensor([1], name+"_1_f", kwargs["initializer"], dtype='float32') - create_tensor([2], name+"_2", kwargs["initializer"]) - create_tensor([3], name+"_3", kwargs["initializer"]) - create_tensor([heads], name+"_c", kwargs["initializer"]) - create_tensor([3], name+"_d", kwargs["initializer"]) - nodes = [ + create_tensor([0], name+"_0", kwargs["initializer"]), + create_tensor([1], name+"_1", kwargs["initializer"]), + create_tensor([1], name+"_1_f", kwargs["initializer"], dtype='float32'), + create_tensor([2], name+"_2", kwargs["initializer"]), + create_tensor([3], name+"_3", kwargs["initializer"]), + create_tensor([heads], name+"_c", 
kwargs["initializer"]), + create_tensor([3], name+"_d", kwargs["initializer"]), make_node('Shape', [input_nodes[0]], [name+"_data_shape"]), make_node('Slice', [name+'_data_shape', name+'_0', name+'_1'], [name+"_a"]), make_node('Slice', [name+'_data_shape', name+'_1', name+'_2'], [name+"_b"]), @@ -2369,14 +2371,13 @@ def convert_broadcast_axis(node, **kwargs): size = convert_string_to_list(attrs.get('size', '()')) assert len(axis) == len(size) - create_tensor([0], name+'_0', kwargs["initializer"]) - create_tensor([1], name+'_1', kwargs["initializer"]) - create_tensor([], name+'_void', kwargs["initializer"]) - create_const_scalar_node(name+'_0_s', np.int64(0), kwargs) - create_const_scalar_node(name+'_1_s', np.int64(1), kwargs) - shape_name = name+'_shape_0' nodes = [ + create_tensor([0], name+'_0', kwargs["initializer"]), + create_tensor([1], name+'_1', kwargs["initializer"]), + create_tensor([], name+'_void', kwargs["initializer"]), + create_const_scalar_node(name+'_0_s', np.int64(0), kwargs), + create_const_scalar_node(name+'_1_s', np.int64(1), kwargs), make_node('Shape', [input_nodes[0]], [shape_name]), make_node('Shape', [shape_name], [name+'_in_dim']), make_node('Reshape', [name+'_in_dim', name+'_void'], [name+'_in_dim_s']), @@ -2385,9 +2386,9 @@ def convert_broadcast_axis(node, **kwargs): for i, axis in enumerate(axis): if axis not in (0, 1): - create_tensor([axis], name+'_'+str(axis), kwargs["initializer"]) - create_tensor([size[i]-1], name+'_size_'+str(i), kwargs["initializer"]) - _ = [ + nodes += [create_tensor([axis], name+'_'+str(axis), kwargs["initializer"])] + nodes += [ + create_tensor([size[i]-1], name+'_size_'+str(i), kwargs["initializer"]), make_node('Equal', [name+'_range', name+'_'+str(axis)], [name+'_equal_'+str(i)]), make_node('Cast', [name+'_equal_'+str(i)], [name+'_cast_'+str(i)], to=int(TensorProto.INT64)), make_node('Mul', [name+'_size_'+str(i), name+'_cast_'+str(i)], [name+'_mul_'+str(i)]), @@ -2395,7 +2396,6 @@ def 
convert_broadcast_axis(node, **kwargs): make_node('Mul', [name+'_add_'+str(i), shape_name], [name+'_shape_'+str(i+1)]) ] shape_name = name+'_shape_'+str(i+1) - nodes += _ nodes += [make_node('Expand', [input_nodes[0], shape_name], [name], name=name)] @@ -2418,16 +2418,15 @@ def convert_sequencemask(node, **kwargs): if(use_sequence_length == 'False'): return [make_node('Identity', [input_nodes[0]], [name], name=name)] - create_tensor([], name+'_void', kwargs["initializer"]) - create_tensor([0], name+'_0', kwargs["initializer"]) - create_tensor([1], name+'_1', kwargs["initializer"]) - create_tensor([2], name+'_2', kwargs["initializer"]) - create_const_scalar_node(name+'_0_s', np.int64(0), kwargs) - create_const_scalar_node(name+'_1_s', np.int64(1), kwargs) - create_const_scalar_node(name+'_2_s', np.int64(2), kwargs) - create_tensor([mask_val], name+'_mask_val', kwargs["initializer"], dtype='float32') - nodes = [ + create_tensor([], name+'_void', kwargs["initializer"]), + create_tensor([0], name+'_0', kwargs["initializer"]), + create_tensor([1], name+'_1', kwargs["initializer"]), + create_tensor([2], name+'_2', kwargs["initializer"]), + create_const_scalar_node(name+'_0_s', np.int64(0), kwargs), + create_const_scalar_node(name+'_1_s', np.int64(1), kwargs), + create_const_scalar_node(name+'_2_s', np.int64(2), kwargs), + create_tensor([mask_val], name+'_mask_val', kwargs["initializer"], dtype='float32'), make_node('Shape', [input_nodes[0]], [name+'_in_shape']), make_node('Slice', [name+'_in_shape', name+'_0', name+'_1'], [name+'_slice_0']), make_node('Slice', [name+'_in_shape', name+'_1', name+'_2'], [name+'_slice_1']), From b0626017ebb831e522f82194b2ad4a4220422ac6 Mon Sep 17 00:00:00 2001 From: zha0q1 Date: Thu, 17 Dec 2020 08:03:53 +0000 Subject: [PATCH 7/7] fix sanity --- python/mxnet/contrib/onnx/mx2onnx/_op_translations.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py 
b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py index d690419c3bb4..0eef42ab924a 100644 --- a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py @@ -178,6 +178,8 @@ def create_const_node(input_name, value, kwargs): return value_node def create_tensor(shape_list, shape_name, initializer, dtype='int64'): + """Helper function to create a tensor value node and an + initializer tensor node with constant value.""" shape_np = np.array(shape_list, dtype=dtype) data_type = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[shape_np.dtype] dims = np.shape(shape_np)