4 changes: 2 additions & 2 deletions python/tvm/relay/op/contrib/cmsisnn.py
@@ -135,8 +135,8 @@ def check_qnn_conv2d(pattern):

         return (
             conv2d.attrs.out_dtype == "int32"
-            and conv2d.attrs.padding[2] == 0
-            and conv2d.attrs.padding[3] == 0
+            and int(conv2d.attrs.padding[2]) == 0
Contributor comment:

I believe this is the actual fix. Am I right? (The other changes are to test code.)
What test would fail in this PR without this change?

I think we could have a simpler test to check whether graph partitioning works for a conv2d with padding values for top, left, bottom, and right.
+            and int(conv2d.attrs.padding[3]) == 0
             and conv2d_input.checked_type.dtype == "int8"
             and conv2d_weight.checked_type.dtype == "int8"
             and pattern.checked_type.dtype == "int8"
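For context: `conv2d.attrs.padding` holds TVM `IntImm` objects rather than plain Python ints, which is presumably why the check now casts each entry with `int()` before comparing it to zero. Below is a minimal illustration; it is not part of the PR, assumes a recent TVM build with the Relay API available, and uses arbitrary shapes and layouts.

import tvm
from tvm import relay

# Build a small conv2d and inspect its padding attribute. Only the attribute
# types matter here; the shapes and layouts are placeholders.
data = relay.var("data", shape=(1, 8, 8, 3), dtype="int8")
weight = relay.var("weight", shape=(3, 3, 3, 16), dtype="int8")
conv = relay.nn.conv2d(
    data,
    weight,
    padding=(0, 0, 0, 0),
    data_layout="NHWC",
    kernel_layout="HWIO",
)
mod = tvm.IRModule.from_expr(relay.Function([data, weight], conv))
mod = relay.transform.InferType()(mod)
conv2d = mod["main"].body

print(type(conv2d.attrs.padding[2]))      # tvm.tir.expr.IntImm, not a Python int
print(int(conv2d.attrs.padding[2]) == 0)  # True -- an ordinary Python bool after the cast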
15 changes: 11 additions & 4 deletions tests/python/contrib/test_cmsisnn/test_conv2d.py
@@ -69,6 +69,8 @@ def make_model(
     kernel_w = kernel_shape[w_index]
     invar = relay.var("input", shape=shape, dtype=dtype)
     p = (0, 0, 0, 0)
+    if padding == "INVALID":
+        p = [1, 2, 1, 2]
     if padding == "SAME":
         p = get_same_padding((shape[1], shape[2]), (kernel_h, kernel_w), dilation, strides)
         invar = relay.nn.pad(
@@ -351,15 +353,19 @@ def parameterize_for_invalid_model(test):
     in_dtype = ["uint8", "int8"]
     kernel_dtype = ["uint8", "int8"]
     kernel_zero_point = [-33, 10, 0]
-    all_combinations = itertools.product(in_dtype, kernel_dtype, kernel_zero_point)
+    padding = ["SAME", "INVALID"]
+    all_combinations = itertools.product(in_dtype, kernel_dtype, kernel_zero_point, padding)
     all_combinations = filter(
         lambda parameters: not (
-            parameters[0] == "int8" and parameters[1] == "int8" and parameters[2] == 0
+            parameters[0] == "int8"
+            and parameters[1] == "int8"
+            and parameters[2] == 0
+            and parameters[3] == "SAME"
         ),
         all_combinations,
     )
     return pytest.mark.parametrize(
-        ["in_dtype", "kernel_dtype", "kernel_zero_point"],
+        ["in_dtype", "kernel_dtype", "kernel_zero_point", "padding"],
         all_combinations,
     )(test)

@@ -370,6 +376,7 @@ def test_invalid_parameters(
     in_dtype,
     kernel_dtype,
     kernel_zero_point,
+    padding,
 ):
     ifm_shape = (1, 28, 28, 12)
     out_channels = 2
@@ -400,7 +407,7 @@
         kernel_scale=kernel_scale,
         output_zero_point=output_zero_point,
         output_scale=output_scale,
-        padding="SAME",
+        padding=padding,
         strides=(1, 1),
         dilation=(1, 1),
         groups=1,
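The contributor comment above asks for a simpler test that exercises graph partitioning when the conv2d itself carries explicit top/left/bottom/right padding values. A rough sketch of what such a test could look like follows; it is not part of this PR, the QNN graph construction and the exact CMSIS-NN pattern requirements (layouts, requantize, bias) are assumptions, and the test function name is hypothetical.

import numpy as np
import tvm
from tvm import relay
from tvm.relay.op.contrib import cmsisnn


def test_conv2d_with_explicit_padding_is_not_offloaded():
    # int8 NHWC input and HWIO weights, as CMSIS-NN expects for quantized conv2d.
    data = relay.var("data", shape=(1, 16, 16, 3), dtype="int8")
    weight = relay.const(np.zeros((3, 3, 3, 8), dtype="int8"))
    conv = relay.qnn.op.conv2d(
        data,
        weight,
        input_zero_point=relay.const(0, "int32"),
        kernel_zero_point=relay.const(0, "int32"),
        input_scale=relay.const(1.0, "float32"),
        kernel_scale=relay.const(1.0, "float32"),
        kernel_size=(3, 3),
        channels=8,
        # Non-zero padding on the conv2d attributes themselves, which the
        # check_qnn_conv2d predicate is meant to reject.
        padding=(1, 1, 1, 1),
        data_layout="NHWC",
        kernel_layout="HWIO",
        out_dtype="int32",
    )
    out = relay.qnn.op.requantize(
        conv,
        input_scale=relay.const(1.0, "float32"),
        input_zero_point=relay.const(0, "int32"),
        output_scale=relay.const(1.0, "float32"),
        output_zero_point=relay.const(0, "int32"),
        out_dtype="int8",
    )
    mod = tvm.IRModule.from_expr(relay.Function([data], out))
    mod = relay.transform.InferType()(mod)
    mod = cmsisnn.partition_for_cmsisnn(mod)

    # With padded conv2d attrs, nothing should be handed to the CMSIS-NN compiler.
    compilers = [
        func.attrs["Compiler"]
        for _, func in mod.functions.items()
        if func.attrs is not None and "Compiler" in func.attrs
    ]
    assert "cmsis-nn" not in compilers

A positive counterpart (zero padding on the conv2d attrs plus a preceding relay.nn.pad, asserting that a cmsis-nn function does appear) would make the negative check above less vacuous, but it has to match the exact CMSIS-NN composite pattern, so it is left out of this sketch.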