From 760a5554effae8716a907233f76ed2555d71262d Mon Sep 17 00:00:00 2001 From: MayYouBeProsperous Date: Tue, 14 Mar 2023 13:40:39 +0800 Subject: [PATCH 1/4] add conv3d for paddle frontend --- python/tvm/relay/frontend/paddlepaddle.py | 40 +++++++++++++++++ .../frontend/paddlepaddle/test_forward.py | 43 +++++++++++++++++++ 2 files changed, 83 insertions(+) diff --git a/python/tvm/relay/frontend/paddlepaddle.py b/python/tvm/relay/frontend/paddlepaddle.py index f771e605cafd..86e91c14bebd 100755 --- a/python/tvm/relay/frontend/paddlepaddle.py +++ b/python/tvm/relay/frontend/paddlepaddle.py @@ -400,6 +400,46 @@ def convert_conv2d_transpose(g, op, block): g.add_node(op.output("Output")[0], out) +def convert_conv3d(g, op, block): + """Operator converter for conv3d.""" + + dilations = op.attr("dilations") + groups = op.attr("groups") + paddings = op.attr("paddings") + padding_algorithm = op.attr("padding_algorithm") + strides = op.attr("strides") + + kernel = g.get_node(op.input("Filter")[0]) + input_x = g.get_node(op.input("Input")[0]) + out_channels, _, k_d, k_h, k_w = infer_shape(kernel) + if padding_algorithm == "VALID": + paddings = [0, 0, 0] + elif padding_algorithm == "SAME": + dilations = [1, 1 ,1] + input_x = autopad(input_x, strides, [k_d, k_h, k_w], dilations) + paddings = [0, 0, 0] + elif padding_algorithm == "EXPLICIT": + if len(paddings) == 3: + paddings = [paddings[0], paddings[1], paddings[2], paddings[0], paddings[1], paddings[2]] + elif len(paddings) == 6: + paddings = [paddings[0], paddings[3], paddings[1], paddings[4], paddings[2], paddings[5]] + else: + msg = 'Value {} in attribute "padding" of operator Conv is not "valid."' + raise tvm.error.OpAttributeInvalid(msg.format(padding_algorithm)) + + out = _op.nn.conv3d( + input_x, + kernel, + strides=strides, + padding=paddings, + dilation=dilations, + groups=groups, + channels=out_channels, + kernel_size=[k_d, k_h, k_w], + ) + g.add_node(op.output("Output")[0], out) + + def convert_dist(g, op, block): """Operator converter for dist.""" diff --git a/tests/python/frontend/paddlepaddle/test_forward.py b/tests/python/frontend/paddlepaddle/test_forward.py index 612c43bb1617..4abcf473ea5f 100755 --- a/tests/python/frontend/paddlepaddle/test_forward.py +++ b/tests/python/frontend/paddlepaddle/test_forward.py @@ -523,6 +523,49 @@ def forward(self, inputs): verify_model(Conv2D1(stride=2, padding="SAME", dilation=2, groups=3), input_data=input_data) +@tvm.testing.uses_gpu +def test_forward_conv(): + class Conv3D(nn.Layer): + def __init__(self, stride=1, padding=0, dilation=1, groups=1, padding_mode="zeros"): + super(Conv3D, self).__init__() + self.conv = nn.Conv3D( + 3, + 6, + 3, + stride=stride, + padding=padding, + dilation=dilation, + groups=groups, + padding_mode=padding_mode, + ) + self.softmax = nn.Softmax() + + @paddle.jit.to_static + def forward(self, inputs): + return self.softmax(self.conv(inputs)) + + input_shapes = [[1, 3, 10, 10, 10], [1, 3, 12, 12, 12]] + + for input_shape in input_shapes: + input_data = paddle.rand(input_shape, dtype="float32") + verify_model(Conv3D(), input_data=input_data) + verify_model(Conv3D(stride=2, padding="VALID", dilation=3), input_data=input_data) + verify_model(Conv3D(stride=2, padding="SAME", dilation=3), input_data=input_data) + verify_model( + Conv3D(stride=2, padding=(3, 3, 4, 4, 2, 2), dilation=3), + input_data=input_data, + ) + verify_model( + Conv3D(stride=2, padding=3, dilation=3, padding_mode="reflect"), + input_data=input_data, + ) + verify_model( + Conv3D(stride=2, padding=3, dilation=3, 
padding_mode="replicate"), + input_data=input_data, + ) + verify_model(Conv3D(stride=2, padding="SAME", dilation=2, groups=3), input_data=input_data) + + @tvm.testing.uses_gpu def test_forward_conv_transpose(): class Conv2DTranspose(nn.Layer): From ec0e6f246fedbffe7684286a37181e8120b9b002 Mon Sep 17 00:00:00 2001 From: MayYouBeProsperous Date: Tue, 14 Mar 2023 14:33:37 +0800 Subject: [PATCH 2/4] codestyle --- python/tvm/relay/frontend/paddlepaddle.py | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/python/tvm/relay/frontend/paddlepaddle.py b/python/tvm/relay/frontend/paddlepaddle.py index 86e91c14bebd..a876e3cf3f73 100755 --- a/python/tvm/relay/frontend/paddlepaddle.py +++ b/python/tvm/relay/frontend/paddlepaddle.py @@ -415,14 +415,28 @@ def convert_conv3d(g, op, block): if padding_algorithm == "VALID": paddings = [0, 0, 0] elif padding_algorithm == "SAME": - dilations = [1, 1 ,1] + dilations = [1, 1, 1] input_x = autopad(input_x, strides, [k_d, k_h, k_w], dilations) paddings = [0, 0, 0] elif padding_algorithm == "EXPLICIT": if len(paddings) == 3: - paddings = [paddings[0], paddings[1], paddings[2], paddings[0], paddings[1], paddings[2]] + paddings = [ + paddings[0], + paddings[1], + paddings[2], + paddings[0], + paddings[1], + paddings[2], + ] elif len(paddings) == 6: - paddings = [paddings[0], paddings[3], paddings[1], paddings[4], paddings[2], paddings[5]] + paddings = [ + paddings[0], + paddings[3], + paddings[1], + paddings[4], + paddings[2], + paddings[5] + ] else: msg = 'Value {} in attribute "padding" of operator Conv is not "valid."' raise tvm.error.OpAttributeInvalid(msg.format(padding_algorithm)) From 2a34dd98e917e648c5a483cdca0eb26e6f83834a Mon Sep 17 00:00:00 2001 From: MayYouBeProsperous Date: Tue, 14 Mar 2023 14:48:45 +0800 Subject: [PATCH 3/4] codestyle --- python/tvm/relay/frontend/paddlepaddle.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/tvm/relay/frontend/paddlepaddle.py b/python/tvm/relay/frontend/paddlepaddle.py index a876e3cf3f73..61ae413e6414 100755 --- a/python/tvm/relay/frontend/paddlepaddle.py +++ b/python/tvm/relay/frontend/paddlepaddle.py @@ -435,7 +435,7 @@ def convert_conv3d(g, op, block): paddings[1], paddings[4], paddings[2], - paddings[5] + paddings[5], ] else: msg = 'Value {} in attribute "padding" of operator Conv is not "valid."' From 550a756995bd6a8ef81382557f80548314c94361 Mon Sep 17 00:00:00 2001 From: MayYouBeProsperous Date: Tue, 14 Mar 2023 19:10:30 +0800 Subject: [PATCH 4/4] fix bugs --- python/tvm/relay/frontend/paddlepaddle.py | 1 + .../frontend/paddlepaddle/test_forward.py | 64 +++++++++---------- 2 files changed, 33 insertions(+), 32 deletions(-) diff --git a/python/tvm/relay/frontend/paddlepaddle.py b/python/tvm/relay/frontend/paddlepaddle.py index 61ae413e6414..d170d1b12015 100755 --- a/python/tvm/relay/frontend/paddlepaddle.py +++ b/python/tvm/relay/frontend/paddlepaddle.py @@ -2468,6 +2468,7 @@ def convert_where_index(g, op, block): "concat": convert_concat, "conv2d": convert_conv2d, "conv2d_transpose": convert_conv2d_transpose, + "conv3d": convert_conv3d, "cos": convert_unary_op, "cosh": convert_unary_op, "cumsum": convert_cumsum, diff --git a/tests/python/frontend/paddlepaddle/test_forward.py b/tests/python/frontend/paddlepaddle/test_forward.py index 4abcf473ea5f..5bdbd68842fb 100755 --- a/tests/python/frontend/paddlepaddle/test_forward.py +++ b/tests/python/frontend/paddlepaddle/test_forward.py @@ -524,7 +524,38 @@ def forward(self, inputs): @tvm.testing.uses_gpu -def 
test_forward_conv(): +def test_forward_conv_transpose(): + class Conv2DTranspose(nn.Layer): + def __init__(self, stride=1, padding=0, dilation=1, groups=1, padding_mode="zeros"): + super(Conv2DTranspose, self).__init__() + self.conv = nn.Conv2DTranspose( + 6, + 3, + 3, + stride=stride, + padding=padding, + dilation=dilation, + groups=groups, + ) + self.softmax = nn.Softmax() + + @paddle.jit.to_static + def forward(self, inputs): + return self.softmax(self.conv(inputs)) + + input_shapes = [[1, 6, 10, 10], [2, 6, 8, 8]] + + for input_shape in input_shapes: + input_data = paddle.rand(input_shape, dtype="float32") + verify_model(Conv2DTranspose(), input_data=input_data) + verify_model(Conv2DTranspose(stride=2, padding="VALID"), input_data=input_data) + verify_model(Conv2DTranspose(stride=2, padding="SAME", dilation=1), input_data=input_data) + verify_model(Conv2DTranspose(stride=2, padding=3), input_data=input_data) + verify_model(Conv2DTranspose(stride=3, padding="SAME", groups=1), input_data=input_data) + + +@tvm.testing.uses_gpu +def test_forward_conv3d(): class Conv3D(nn.Layer): def __init__(self, stride=1, padding=0, dilation=1, groups=1, padding_mode="zeros"): super(Conv3D, self).__init__() @@ -566,37 +597,6 @@ def forward(self, inputs): verify_model(Conv3D(stride=2, padding="SAME", dilation=2, groups=3), input_data=input_data) -@tvm.testing.uses_gpu -def test_forward_conv_transpose(): - class Conv2DTranspose(nn.Layer): - def __init__(self, stride=1, padding=0, dilation=1, groups=1, padding_mode="zeros"): - super(Conv2DTranspose, self).__init__() - self.conv = nn.Conv2DTranspose( - 6, - 3, - 3, - stride=stride, - padding=padding, - dilation=dilation, - groups=groups, - ) - self.softmax = nn.Softmax() - - @paddle.jit.to_static - def forward(self, inputs): - return self.softmax(self.conv(inputs)) - - input_shapes = [[1, 6, 10, 10], [2, 6, 8, 8]] - - for input_shape in input_shapes: - input_data = paddle.rand(input_shape, dtype="float32") - verify_model(Conv2DTranspose(), input_data=input_data) - verify_model(Conv2DTranspose(stride=2, padding="VALID"), input_data=input_data) - verify_model(Conv2DTranspose(stride=2, padding="SAME", dilation=1), input_data=input_data) - verify_model(Conv2DTranspose(stride=2, padding=3), input_data=input_data) - verify_model(Conv2DTranspose(stride=3, padding="SAME", groups=1), input_data=input_data) - - @tvm.testing.uses_gpu def test_forward_dot(): class Dot(nn.Layer):
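
For reference, the new conv3d mapping can be exercised end to end roughly as sketched below. This is a sketch only: the Conv3DNet layer, the save path "./conv3d_net", and the input name "inputs" are illustrative assumptions and not part of the patches; verify_model in test_forward.py performs the equivalent conversion and numerical comparison against PaddlePaddle.

import numpy as np
import paddle
import paddle.nn as nn
import tvm
from tvm import relay
from tvm.contrib import graph_executor


class Conv3DNet(nn.Layer):
    def __init__(self):
        super(Conv3DNet, self).__init__()
        # NCDHW input with 3 channels, 6 output channels, 3x3x3 kernel.
        self.conv = nn.Conv3D(3, 6, 3, stride=2, padding=1)

    def forward(self, inputs):
        return self.conv(inputs)


model = Conv3DNet()
model.eval()

# Trace to a static graph and round-trip through paddle.jit.save/load so that
# relay.frontend.from_paddle receives a TranslatedLayer, as in the documented
# PaddlePaddle-to-TVM workflow.
input_spec = [paddle.static.InputSpec(shape=[1, 3, 10, 10, 10], dtype="float32", name="inputs")]
static_model = paddle.jit.to_static(model, input_spec=input_spec)
paddle.jit.save(static_model, "./conv3d_net")
loaded = paddle.jit.load("./conv3d_net")

# The "conv3d" op in the saved program is dispatched to convert_conv3d.
shape_dict = {"inputs": [1, 3, 10, 10, 10]}
mod, params = relay.frontend.from_paddle(loaded, shape_dict=shape_dict)

# Compile and run on CPU as a quick smoke test of the converted graph.
with tvm.transform.PassContext(opt_level=3):
    lib = relay.build(mod, target="llvm", params=params)
rt = graph_executor.GraphModule(lib["default"](tvm.cpu()))
rt.set_input("inputs", np.random.rand(1, 3, 10, 10, 10).astype("float32"))
rt.run()
print(rt.get_output(0).numpy().shape)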