diff --git a/python/tvm/relay/frontend/paddlepaddle.py b/python/tvm/relay/frontend/paddlepaddle.py
index f771e605cafd..d170d1b12015 100755
--- a/python/tvm/relay/frontend/paddlepaddle.py
+++ b/python/tvm/relay/frontend/paddlepaddle.py
@@ -400,6 +400,60 @@ def convert_conv2d_transpose(g, op, block):
     g.add_node(op.output("Output")[0], out)
 
 
+def convert_conv3d(g, op, block):
+    """Operator converter for conv3d."""
+
+    dilations = op.attr("dilations")
+    groups = op.attr("groups")
+    paddings = op.attr("paddings")
+    padding_algorithm = op.attr("padding_algorithm")
+    strides = op.attr("strides")
+
+    kernel = g.get_node(op.input("Filter")[0])
+    input_x = g.get_node(op.input("Input")[0])
+    out_channels, _, k_d, k_h, k_w = infer_shape(kernel)
+    if padding_algorithm == "VALID":
+        paddings = [0, 0, 0]
+    elif padding_algorithm == "SAME":
+        dilations = [1, 1, 1]
+        input_x = autopad(input_x, strides, [k_d, k_h, k_w], dilations)
+        paddings = [0, 0, 0]
+    elif padding_algorithm == "EXPLICIT":
+        if len(paddings) == 3:
+            paddings = [
+                paddings[0],
+                paddings[1],
+                paddings[2],
+                paddings[0],
+                paddings[1],
+                paddings[2],
+            ]
+        elif len(paddings) == 6:
+            paddings = [
+                paddings[0],
+                paddings[3],
+                paddings[1],
+                paddings[4],
+                paddings[2],
+                paddings[5],
+            ]
+    else:
+        msg = 'Value {} in attribute "padding" of operator Conv is not "valid."'
+        raise tvm.error.OpAttributeInvalid(msg.format(padding_algorithm))
+
+    out = _op.nn.conv3d(
+        input_x,
+        kernel,
+        strides=strides,
+        padding=paddings,
+        dilation=dilations,
+        groups=groups,
+        channels=out_channels,
+        kernel_size=[k_d, k_h, k_w],
+    )
+    g.add_node(op.output("Output")[0], out)
+
+
 def convert_dist(g, op, block):
     """Operator converter for dist."""
 
@@ -2414,6 +2468,7 @@ def convert_where_index(g, op, block):
     "concat": convert_concat,
     "conv2d": convert_conv2d,
     "conv2d_transpose": convert_conv2d_transpose,
+    "conv3d": convert_conv3d,
     "cos": convert_unary_op,
     "cosh": convert_unary_op,
     "cumsum": convert_cumsum,
diff --git a/tests/python/frontend/paddlepaddle/test_forward.py b/tests/python/frontend/paddlepaddle/test_forward.py
index 612c43bb1617..5bdbd68842fb 100755
--- a/tests/python/frontend/paddlepaddle/test_forward.py
+++ b/tests/python/frontend/paddlepaddle/test_forward.py
@@ -554,6 +554,49 @@ def forward(self, inputs):
     verify_model(Conv2DTranspose(stride=3, padding="SAME", groups=1), input_data=input_data)
 
 
+@tvm.testing.uses_gpu
+def test_forward_conv3d():
+    class Conv3D(nn.Layer):
+        def __init__(self, stride=1, padding=0, dilation=1, groups=1, padding_mode="zeros"):
+            super(Conv3D, self).__init__()
+            self.conv = nn.Conv3D(
+                3,
+                6,
+                3,
+                stride=stride,
+                padding=padding,
+                dilation=dilation,
+                groups=groups,
+                padding_mode=padding_mode,
+            )
+            self.softmax = nn.Softmax()
+
+        @paddle.jit.to_static
+        def forward(self, inputs):
+            return self.softmax(self.conv(inputs))
+
+    input_shapes = [[1, 3, 10, 10, 10], [1, 3, 12, 12, 12]]
+
+    for input_shape in input_shapes:
+        input_data = paddle.rand(input_shape, dtype="float32")
+        verify_model(Conv3D(), input_data=input_data)
+        verify_model(Conv3D(stride=2, padding="VALID", dilation=3), input_data=input_data)
+        verify_model(Conv3D(stride=2, padding="SAME", dilation=3), input_data=input_data)
+        verify_model(
+            Conv3D(stride=2, padding=(3, 3, 4, 4, 2, 2), dilation=3),
+            input_data=input_data,
+        )
+        verify_model(
+            Conv3D(stride=2, padding=3, dilation=3, padding_mode="reflect"),
+            input_data=input_data,
+        )
+        verify_model(
+            Conv3D(stride=2, padding=3, dilation=3, padding_mode="replicate"),
+            input_data=input_data,
+        )
+        verify_model(Conv3D(stride=2, padding="SAME", dilation=2, groups=3), input_data=input_data)
+
+
 @tvm.testing.uses_gpu
 def test_forward_dot():
     class Dot(nn.Layer):
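
Note (not part of the patch): a minimal sketch of the round trip that verify_model exercises for the new conv3d converter, assuming the Conv3D test layer defined in the diff above. The save path, the input tensor name "inputs", and the llvm target are assumptions made for this illustration, not taken from the PR.

    # Illustrative only -- mirrors what verify_model does in test_forward.py:
    # jit-save a Paddle layer, reload it, convert it to Relay with
    # relay.frontend.from_paddle, and build for CPU.
    import paddle
    import tvm
    from tvm import relay

    layer = Conv3D()  # the test layer defined in the diff above
    input_shape = [1, 3, 10, 10, 10]

    # Save and reload through paddle.jit so the frontend receives a
    # TranslatedLayer/Program it can walk.
    spec = [paddle.static.InputSpec(shape=input_shape, dtype="float32", name="inputs")]
    paddle.jit.save(layer, "/tmp/conv3d_example/model", input_spec=spec)  # path is an assumption
    translated = paddle.jit.load("/tmp/conv3d_example/model")

    # The shape_dict key must match the program's input name; "inputs" is assumed here.
    mod, params = relay.frontend.from_paddle(translated, shape_dict={"inputs": input_shape})
    with tvm.transform.PassContext(opt_level=3):
        lib = relay.build(mod, target="llvm", params=params)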