[Frontend][Paddle] [PaddlePaddle Hackathon 4] Add attribute support for dropout/hard_sigmoid/pixel_shuffle #14575

Merged · 10 commits · Apr 12, 2023
14 changes: 11 additions & 3 deletions python/tvm/relay/frontend/paddlepaddle.py
@@ -502,7 +502,13 @@ def convert_dropout(g, op, block):
    """Operator converter for dropout."""

    x = g.get_node(op.input("X")[0])
-    g.add_node(op.output("Out")[0], x)
+    dropout_prob = op.attr("dropout_prob")
+    dropout_implementation = op.attr("dropout_implementation")
+    if dropout_implementation == "downgrade_in_infer":
+        out = _op.nn.dropout(x, dropout_prob) * _expr.const(1 - dropout_prob, dtype="float32")
+    else:
+        out = _op.nn.dropout(x, dropout_prob)
+    g.add_node(op.output("Out")[0], out)



def convert_dot(g, op, block):
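
Note (not part of the diff): a rough NumPy sketch of what Paddle's two dropout_implementation modes mean at inference time, which is what the converter above targets. Since Relay's nn.dropout is effectively an identity once inference simplification runs, only the "downgrade_in_infer" case needs the extra (1 - p) scale. The values below are made up for illustration.

import numpy as np

p = 0.3  # hypothetical dropout_prob
x = np.random.rand(2, 4).astype("float32")

# "upscale_in_train": outputs are scaled by 1/(1-p) during training,
# so inference is just the identity.
infer_upscale = x

# "downgrade_in_infer": outputs are left unscaled during training,
# so inference multiplies by (1 - p) -- the factor the converter folds in.
infer_downgrade = x * (1.0 - p)
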
@@ -897,8 +903,9 @@ def convert_hard_sigmoid(g, op, block):
    """Operator converter for hard_sigmoid."""

    slope = op.attr("slope")
+    offset = op.attr("offset")
    x = g.get_node(op.input("X")[0])
-    out = x * _expr.const(slope) + _expr.const(0.5)
+    out = x * _expr.const(slope) + _expr.const(offset)
    out = _op.clip(out, 0, 1)
    g.add_node(op.output("Out")[0], out)
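
Note (not part of the diff): hard_sigmoid computes clip(slope * x + offset, 0, 1); the old converter hard-coded the offset to 0.5, which only matches Paddle's default. A minimal NumPy reference, assuming Paddle's default slope/offset values:

import numpy as np

def hard_sigmoid_ref(x, slope=0.1666667, offset=0.5):
    # Same formula the converter now emits: clip(slope * x + offset, 0, 1).
    return np.clip(slope * x + offset, 0.0, 1.0)

x = np.linspace(-4, 4, 9, dtype="float32")
print(hard_sigmoid_ref(x))              # default offset
print(hard_sigmoid_ref(x, offset=0.6))  # non-default offset, the case the old code mishandled
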

@@ -1425,7 +1432,8 @@ def convert_pixel_shuffle(g, op, block):

    x = g.get_node(op.input("X")[0])
    upscale_factor = op.attr("upscale_factor")
-    out = _op.nn.depth_to_space(x, upscale_factor, mode="CRD")
+    data_format = op.attr("data_format")
+    out = _op.nn.depth_to_space(x, block_size=upscale_factor, layout=data_format, mode="CRD")
    g.add_node(op.output("Out")[0], out)
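
Note (not part of the diff): a small NumPy sketch of the rearrangement depth_to_space performs for pixel_shuffle, assuming NCHW layout and CRD channel ordering. The new data_format attribute simply forwards the layout ("NCHW" or "NHWC") to Relay instead of always assuming NCHW.

import numpy as np

def pixel_shuffle_nchw_ref(x, r):
    # (N, C*r*r, H, W) -> (N, C, H*r, W*r), CRD ordering: channel blocks come first.
    n, c, h, w = x.shape
    assert c % (r * r) == 0
    out_c = c // (r * r)
    x = x.reshape(n, out_c, r, r, h, w)
    x = x.transpose(0, 1, 4, 2, 5, 3)  # interleave the r x r blocks into H and W
    return x.reshape(n, out_c, h * r, w * r)

x = np.arange(16, dtype="float32").reshape(1, 4, 2, 2)
print(pixel_shuffle_nchw_ref(x, 2).shape)  # (1, 1, 4, 4)
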


25 changes: 23 additions & 2 deletions tests/python/frontend/paddlepaddle/test_forward.py
@@ -617,10 +617,22 @@ def test_forward_dropout():
    def dropout(inputs):
        return nn.functional.dropout(inputs)

+    @paddle.jit.to_static
+    def dropout1(inputs):
+        return nn.functional.dropout(inputs, 0.1)
+
+    @paddle.jit.to_static
+    def dropout2(inputs):
+        return nn.functional.dropout(inputs, 0.1, mode="downscale_in_infer")
+
    input_shape = [1, 3, 10, 10]
    input_data = paddle.rand(input_shape, dtype="float32")
    verify_model(dropout, input_data=input_data[0, 0])
    verify_model(dropout, input_data=input_data)
+    verify_model(dropout1, input_data=input_data[0, 0])
+    verify_model(dropout1, input_data=input_data)
+    verify_model(dropout2, input_data=input_data[0, 0])
+    verify_model(dropout2, input_data=input_data)


def test_forward_elemwise():
@@ -1008,9 +1020,13 @@ def test_forward_hard_sigmoid():
    def hard_sigmoid(inputs):
        return nn.functional.hardsigmoid(inputs)

+    def hard_sigmoid1(inputs):
+        return nn.functional.hardsigmoid(inputs, offset=0.6)
+
    input_shape = [1, 3, 10, 10]
    input_data = paddle.rand(input_shape, dtype="float32")
    verify_model(hard_sigmoid, input_data=input_data)
+    verify_model(hard_sigmoid1, input_data=input_data)


@tvm.testing.uses_gpu
@@ -1781,9 +1797,9 @@ def forward(self, input1, input2):
@tvm.testing.uses_gpu
def test_forward_pixel_shuffle():
    class PixelShuffle(nn.Layer):
-        def __init__(self, upscale_factor):
+        def __init__(self, upscale_factor, data_format="NCHW"):
            super(PixelShuffle, self).__init__()
-            self.pixel_shuffle = paddle.nn.PixelShuffle(upscale_factor)
+            self.pixel_shuffle = paddle.nn.PixelShuffle(upscale_factor, data_format)

        @paddle.jit.to_static
        def forward(self, x):
@@ -1794,6 +1810,11 @@ def forward(self, x):
        x = paddle.rand(input_shape, dtype="float32")
        verify_model(PixelShuffle(2), x)

+    input_shapes = [[1, 3, 3, 4], [2, 2, 5, 8]]
+    for input_shape in input_shapes:
+        x = paddle.rand(input_shape, dtype="float32")
+        verify_model(PixelShuffle(2, data_format="NHWC"), x)
+

@tvm.testing.uses_gpu
def test_forward_prelu():