Skip to content

Commit dff7725

Browse files
Manfreds and claude
authored
[API Compatibility] Add conv_transpose decorator and alias, test conv2d_transpose only -part (PaddlePaddle#78475)
* Add conv_transpose decorator and alias, test conv2d_transpose only Split from PaddlePaddle#78222 to isolate Windows-inference CI crash (0xc0000409). This PR tests only conv2d_transpose compatibility. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com> * add conv2d_transpose compatibility test Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com> * fix code style * reduce conv2d_transpose test tensor sizes to avoid Windows-Inference crash Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com> --------- Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
1 parent 86de408 commit dff7725

File tree

3 files changed

+90
-0
lines changed

3 files changed

+90
-0
lines changed

python/paddle/nn/functional/__init__.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -175,6 +175,9 @@
175175
)
176176

177177
logsigmoid = log_sigmoid
178+
conv_transpose1d = conv1d_transpose
179+
conv_transpose2d = conv2d_transpose
180+
conv_transpose3d = conv3d_transpose
178181
__all__ = [
179182
'celu',
180183
'conv1d',
@@ -183,6 +186,9 @@
183186
'conv2d_transpose',
184187
'conv3d',
185188
'conv3d_transpose',
189+
'conv_transpose1d',
190+
'conv_transpose2d',
191+
'conv_transpose3d',
186192
'pairwise_distance',
187193
'elu',
188194
'elu_',

python/paddle/nn/functional/conv.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -954,6 +954,7 @@ def conv2d(
954954
)
955955

956956

957+
@param_one_alias(["x", "input"])
957958
def conv1d_transpose(
958959
x: Tensor,
959960
weight: Tensor,
@@ -1222,6 +1223,7 @@ def conv1d_transpose(
12221223
return out
12231224

12241225

1226+
@param_one_alias(["x", "input"])
12251227
def conv2d_transpose(
12261228
x: Tensor,
12271229
weight: Tensor,
@@ -1725,6 +1727,7 @@ def conv3d(
17251727
)
17261728

17271729

1730+
@param_one_alias(["x", "input"])
17281731
def conv3d_transpose(
17291732
x: Tensor,
17301733
weight: Tensor,

test/legacy_test/test_api_compatibility_part3.py

Lines changed: 81 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1336,5 +1336,86 @@ def test_static_Compatibility(self):
13361336
)
13371337

13381338

1339+
# Test conv2d_transpose / conv_transpose2d compatibility
1340+
class TestConv2dTransposeAPI(unittest.TestCase):
1341+
def setUp(self):
1342+
np.random.seed(2025)
1343+
self.dtype = 'float32'
1344+
self.np_x = np.random.rand(1, 2, 4, 4).astype(self.dtype)
1345+
self.np_weight = np.random.rand(2, 2, 3, 3).astype(self.dtype)
1346+
self.np_bias = np.random.rand(2).astype(self.dtype)
1347+
1348+
def test_dygraph_Compatibility(self):
1349+
paddle.disable_static()
1350+
x = paddle.to_tensor(self.np_x)
1351+
weight = paddle.to_tensor(self.np_weight)
1352+
bias = paddle.to_tensor(self.np_bias)
1353+
1354+
# 1. Paddle Positional arguments
1355+
out1 = paddle.nn.functional.conv2d_transpose(x, weight)
1356+
# 2. Paddle keyword arguments
1357+
out2 = paddle.nn.functional.conv2d_transpose(x=x, weight=weight)
1358+
# 3. PyTorch keyword arguments (alias: input)
1359+
out3 = paddle.nn.functional.conv2d_transpose(input=x, weight=weight)
1360+
# 4. PyTorch function name alias
1361+
out4 = paddle.nn.functional.conv_transpose2d(x, weight)
1362+
# 5. PyTorch function name alias + PyTorch keyword
1363+
out5 = paddle.nn.functional.conv_transpose2d(input=x, weight=weight)
1364+
# 6. Mixed arguments (positional + keyword)
1365+
out6 = paddle.nn.functional.conv2d_transpose(
1366+
x, weight, bias=bias, stride=1, padding=0
1367+
)
1368+
# 7. Positional arguments with bias
1369+
out7 = paddle.nn.functional.conv2d_transpose(x, weight, bias)
1370+
1371+
# Verify outputs without bias
1372+
ref = out1.numpy()
1373+
for out in [out2, out3, out4, out5]:
1374+
np.testing.assert_allclose(out.numpy(), ref, rtol=1e-5)
1375+
1376+
# Verify outputs with bias
1377+
ref_bias = out6.numpy()
1378+
np.testing.assert_allclose(out7.numpy(), ref_bias, rtol=1e-5)
1379+
1380+
paddle.enable_static()
1381+
1382+
def test_static_Compatibility(self):
1383+
paddle.enable_static()
1384+
main = paddle.static.Program()
1385+
startup = paddle.static.Program()
1386+
with paddle.static.program_guard(main, startup):
1387+
x = paddle.static.data(
1388+
name="x", shape=[1, 2, 4, 4], dtype=self.dtype
1389+
)
1390+
weight = paddle.static.data(
1391+
name="weight", shape=[2, 2, 3, 3], dtype=self.dtype
1392+
)
1393+
1394+
# 1. Paddle Positional arguments
1395+
out1 = paddle.nn.functional.conv2d_transpose(x, weight)
1396+
# 2. Paddle keyword arguments
1397+
out2 = paddle.nn.functional.conv2d_transpose(x=x, weight=weight)
1398+
# 3. PyTorch keyword arguments (alias: input)
1399+
out3 = paddle.nn.functional.conv2d_transpose(input=x, weight=weight)
1400+
# 4. PyTorch function name alias
1401+
out4 = paddle.nn.functional.conv_transpose2d(x, weight)
1402+
# 5. PyTorch function name alias + PyTorch keyword
1403+
out5 = paddle.nn.functional.conv_transpose2d(input=x, weight=weight)
1404+
1405+
exe = paddle.static.Executor()
1406+
fetches = exe.run(
1407+
main,
1408+
feed={
1409+
"x": self.np_x,
1410+
"weight": self.np_weight,
1411+
},
1412+
fetch_list=[out1, out2, out3, out4, out5],
1413+
)
1414+
1415+
# Verify all outputs
1416+
for i in range(1, len(fetches)):
1417+
np.testing.assert_allclose(fetches[0], fetches[i], rtol=1e-5)
1418+
1419+
13391420
if __name__ == "__main__":
13401421
unittest.main()

0 commit comments

Comments
 (0)