Skip to content

Commit b4796cc

Browse files
authored
[Frontend][Paddle] [PaddlePaddle Hackathon 4] add convert support for p_norm/roi_align/softmax_with_cross_entropy (#14826)
* add paddle ops convert - affine_channel - p_norm - roi_align - softmax_with_cross_entropy * delete affine_channel op convert
1 parent 0e24aa7 commit b4796cc

File tree

2 files changed

+165
-0
lines changed

2 files changed

+165
-0
lines changed

python/tvm/relay/frontend/paddlepaddle.py

Lines changed: 82 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1390,6 +1390,22 @@ def convert_one_hot_v2(g, op, block):
13901390
g.add_node(op.output("Out")[0], out)
13911391

13921392

def convert_p_norm(g, op, block):
    """Operator converter for p_norm.

    Computes the p-norm of ``X`` along ``axis`` as
    ``sum(|x| ** p) ** (1 / p)`` for the finite ``porder`` attribute,
    and registers the result as the op's ``Out`` node.
    """
    # FIX: parameter was misspelled `blcok`; renamed to `block` to match
    # every other converter signature in this frontend (called positionally,
    # so the rename is safe for callers).

    x = g.get_node(op.input("X")[0])
    axis = op.attr("axis")
    p = op.attr("porder")
    keepdim = op.attr("keepdim")
    # NOTE(review): constants are hard-coded to float32 — presumably the
    # inputs are float32; a float64 input would mix dtypes here. Confirm.
    p_node = _expr.const(p, dtype="float32")
    abs_node = _op.abs(x)
    pow_node = _op.power(abs_node, p_node)
    reduce_sum = _op.sum(pow_node, axis=[axis], keepdims=keepdim)
    p_node1 = _expr.const(1.0 / p, dtype="float32")
    out = _op.power(reduce_sum, p_node1)
    g.add_node(op.output("Out")[0], out)
13931409
def convert_padding(g, op, block):
13941410
"""Operator converter for padding."""
13951411

@@ -1638,6 +1654,30 @@ def convert_reshape(g, op, block):
16381654
g.add_node(op.output("Out")[0], out)
16391655

16401656

def convert_roi_align(g, op, block):
    """Operator converter for roi_align.

    Translates Paddle's roi_align to relay's vision.roi_align, prepending
    the batch index column that relay expects on each ROI.
    """

    spatial_scale = op.attr("spatial_scale")
    rois = g.get_node(op.input("ROIs")[0])
    # Paddle's `aligned` mode shifts ROI coordinates by half a pixel
    # (expressed in input-feature coordinates) before pooling.
    if op.attr("aligned"):
        half_pixel = _op.divide(
            _expr.const(0.5, dtype="float32"),
            _expr.const(spatial_scale, dtype="float32"),
        )
        rois = _op.subtract(rois, half_pixel)
    # Relay wants each ROI as [batch_index, x1, y1, x2, y2]; a zero batch
    # index is prepended for every ROI (presumably batch size 1 — TODO
    # confirm behavior for multi-batch inputs).
    num_rois = infer_shape(rois)[0]
    batch_index = _op.full(
        _expr.const(0, dtype="int32"), [num_rois, 1], dtype="float32"
    )
    rois = _op.concatenate([batch_index, rois], axis=1)
    out = _op.vision.roi_align(
        g.get_node(op.input("X")[0]),
        rois,
        pooled_size=[op.attr("pooled_height"), op.attr("pooled_width")],
        spatial_scale=spatial_scale,
        sample_ratio=op.attr("sampling_ratio"),
        mode="avg",
    )
    g.add_node(op.output("Out")[0], out)
16411681
def convert_rnn(g, op, block):
16421682
"""Operator converter for rnn."""
16431683

@@ -2156,6 +2196,45 @@ def convert_softmax(g, op, block):
21562196
g.add_node(op.output("Out")[0], out)
21572197

21582198

def convert_softmax_with_cross_entropy(g, op, block):
    """Operator converter for softmax_with_cross_entropy.

    Registers two outputs: the softmax of the logits ("Softmax") and the
    cross-entropy loss ("Loss"). Handles both soft (probability) labels
    and hard (index) labels; ``ignore_index`` applies only to hard labels.
    """

    logits = g.get_node(op.input("Logits")[0])
    labels = g.get_node(op.input("Label")[0])
    ignore_index = op.attr("ignore_index")
    axis = op.attr("axis")
    if axis < 0:
        # Normalize a negative axis to its non-negative equivalent.
        axis = len(infer_shape(logits)) + axis

    softmax = _op.nn.softmax(logits, axis=axis)

    g.add_node(op.output("Softmax")[0], softmax)

    softmax = _op.log(softmax)
    soft_label = op.attr("soft_label")
    if soft_label:
        # Soft labels: loss = -sum(label * log_softmax) over the class axis.
        loss = _op.sum(-labels * softmax, axis=axis)
    else:
        # Hard labels carry a singleton dim on `axis`, so one-hot on
        # axis + 1 and squeeze the original singleton away afterwards.
        labels_one = _op.one_hot(
            labels,
            on_value=_expr.const(1.0, dtype="float32"),
            off_value=_expr.const(0.0, dtype="float32"),
            depth=infer_shape(logits)[axis],
            axis=axis + 1,
            dtype="float32",
        )
        labels_one = _op.squeeze(labels_one, axis=axis)
        loss = _op.sum(-labels_one * softmax, axis=axis)
        loss = _op.expand_dims(loss, axis=axis)
    if ignore_index != -100:  # only when soft_label is False
        assert not soft_label, "soft_label and ignore_index cannot be set at the same time."
        # Zero out the loss of every position whose label equals ignore_index.
        # NOTE(review): the mask assumes hard labels are int64 — confirm.
        ignore_mask = _op.not_equal(labels, _expr.const(ignore_index, dtype="int64"))
        ignore_mask = _op.cast(ignore_mask, "float32")
        loss = _op.multiply(loss, ignore_mask)

    g.add_node(op.output("Loss")[0], loss)
21592238
def convert_softplus(g, op, block):
21602239
"""Operator converter for softplus."""
21612240

@@ -2549,6 +2628,7 @@ def convert_where_index(g, op, block):
25492628
"norm": convert_norm,
25502629
"not_equal": convert_elementwise_op,
25512630
"one_hot_v2": convert_one_hot_v2,
2631+
"p_norm": convert_p_norm,
25522632
"pad1d": convert_padding,
25532633
"pad2d": convert_padding,
25542634
"pad3d": convert_padding,
@@ -2561,6 +2641,7 @@ def convert_where_index(g, op, block):
25612641
"relu6": convert_relu6,
25622642
"reshape2": convert_reshape,
25632643
"round": convert_unary_op,
2644+
"roi_align": convert_roi_align,
25642645
"reciprocal": convert_reciprocal,
25652646
"reduce_all": convert_reduce,
25662647
"reduce_any": convert_reduce,
@@ -2584,6 +2665,7 @@ def convert_where_index(g, op, block):
25842665
"size": convert_size,
25852666
"slice": convert_slice,
25862667
"softmax": convert_softmax,
2668+
"softmax_with_cross_entropy": convert_softmax_with_cross_entropy,
25872669
"softplus": convert_softplus,
25882670
"softsign": convert_softsign,
25892671
"softshrink": convert_softshrink,

tests/python/frontend/paddlepaddle/test_forward.py

Lines changed: 83 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2304,5 +2304,88 @@ def forward(self, x, y):
23042304
verify_model(Dist(), input_data=[y, v])
23052305

23062306

@tvm.testing.uses_gpu
def test_forward_p_norm():
    """Exercise the p_norm converter across p values, axes and keepdim."""

    class PNorm(nn.Layer):
        def __init__(self, axis, keepdim, p=1):
            super(PNorm, self).__init__()
            self.p = p
            self.axis = axis
            self.keepdim = keepdim

        @paddle.jit.to_static
        def forward(self, inputs):
            return paddle.norm(inputs, p=self.p, axis=self.axis, keepdim=self.keepdim)

    x = paddle.rand((2, 2, 3), dtype="float32")
    verify_model(PNorm(axis=0, keepdim=True), input_data=x)
    verify_model(PNorm(axis=0, keepdim=False), input_data=x)
    verify_model(PNorm(axis=1, keepdim=True, p=1.5), input_data=x)
    verify_model(PNorm(axis=-1, keepdim=True, p=3.4), input_data=x)
@tvm.testing.uses_gpu
def test_forward_roi_align():
    """Exercise the roi_align converter: default, aligned, and scaled modes."""

    class RoiAlign(nn.Layer):
        def __init__(self, spatial_scale=1.0, sampling_ratio=-1, aligned=False):
            super(RoiAlign, self).__init__()
            self.spatial_scale = spatial_scale
            self.sampling_ratio = sampling_ratio
            self.aligned = aligned

        @paddle.jit.to_static
        def forward(self, feat, rois, rois_num):
            return paddle.vision.ops.roi_align(
                feat, rois, rois_num, 3, self.spatial_scale, self.sampling_ratio, self.aligned
            )

    feat = paddle.rand((1, 128, 32, 32), dtype="float32")
    boxes = paddle.rand([3, 4])
    # Make each box well-formed: (x2, y2) strictly greater than (x1, y1).
    boxes[:, 2] += boxes[:, 0] + 3
    boxes[:, 3] += boxes[:, 1] + 4
    boxes_num = paddle.to_tensor([3]).astype("int32")
    verify_model(RoiAlign(), input_data=[feat, boxes, boxes_num])
    verify_model(RoiAlign(aligned=True), input_data=[feat, boxes, boxes_num])
    verify_model(
        RoiAlign(spatial_scale=2.0, aligned=True), input_data=[feat, boxes, boxes_num]
    )
@tvm.testing.uses_gpu
def test_forward_softmax_with_cross_entropy():
    """Exercise softmax_with_cross_entropy: hard/soft labels, ignore_index,
    return_softmax, and non-default axes."""

    class SoftmaxWithCrossEntropy(nn.Layer):
        def __init__(self, soft_label=False, ignore_index=-100, return_softmax=False, axis=-1):
            super(SoftmaxWithCrossEntropy, self).__init__()
            self.soft_label = soft_label
            self.ignore_index = ignore_index
            self.return_softmax = return_softmax
            self.axis = axis

        @paddle.jit.to_static
        def forward(self, logits, label):
            return paddle.nn.functional.softmax_with_cross_entropy(
                logits,
                label,
                soft_label=self.soft_label,
                ignore_index=self.ignore_index,
                return_softmax=self.return_softmax,
                axis=self.axis,
            )

    # Hard (index) labels over the last axis.
    logits = paddle.rand([5, 3], dtype="float32")
    label = paddle.randint(0, 2, [5, 1])
    verify_model(SoftmaxWithCrossEntropy(), input_data=[logits, label])
    verify_model(SoftmaxWithCrossEntropy(return_softmax=True), input_data=[logits, label])
    verify_model(
        SoftmaxWithCrossEntropy(return_softmax=True, ignore_index=1), input_data=[logits, label]
    )
    # Hard labels over a middle axis.
    logits = paddle.rand([5, 4, 3], dtype="float32")
    label = paddle.randint(0, 2, [5, 1, 3])
    verify_model(SoftmaxWithCrossEntropy(axis=1), input_data=[logits, label])
    # Soft (probability) labels, same rank as the logits.
    label = paddle.randint(0, 2, [5, 4, 3]).astype("float32")
    verify_model(SoftmaxWithCrossEntropy(soft_label=True), input_data=[logits, label])
    verify_model(SoftmaxWithCrossEntropy(soft_label=True, axis=0), input_data=[logits, label])
23072390
if __name__ == "__main__":
23082391
tvm.testing.main()

0 commit comments

Comments
 (0)