Skip to content

Commit a2ec403

Browse files
authored
Mark some APIs as deprecated (PaddlePaddle#77849)
* Deprecate APIs
* Fix CI for deprecated APIs
* Fix tests
1 parent 60054c2 commit a2ec403

File tree

4 files changed

+27
-3
lines changed

4 files changed

+27
-3
lines changed

python/paddle/incubate/nn/functional/fused_transformer.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@
2626
in_dynamic_mode,
2727
in_dynamic_or_pir_mode,
2828
)
29+
from paddle.utils.deprecated import deprecated
2930

3031
if TYPE_CHECKING:
3132
from collections.abc import Sequence
@@ -509,6 +510,11 @@ def fused_bias_dropout_residual_layer_norm(
509510
return final_out
510511

511512

513+
@deprecated(
514+
since="3.4.0",
515+
level=1,
516+
update_to="paddle.nn.functional.scaled_dot_product_attention",
517+
)
512518
def fused_multi_head_attention(
513519
x: Tensor,
514520
qkv_weight: Tensor,

python/paddle/incubate/nn/functional/masked_multihead_attention.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818

1919
from paddle import _C_ops
2020
from paddle.framework import LayerHelper, in_dynamic_or_pir_mode
21+
from paddle.utils.deprecated import deprecated
2122

2223
if TYPE_CHECKING:
2324
from paddle import Tensor
@@ -71,6 +72,11 @@ def masked_multihead_attention(
7172
) -> tuple[Tensor, Tensor, Tensor]: ...
7273

7374

75+
@deprecated(
76+
since="3.4.0",
77+
level=1,
78+
update_to="paddle.nn.functional.scaled_dot_product_attention",
79+
)
7480
def masked_multihead_attention(
7581
x,
7682
cache_kv=None,

python/paddle/incubate/operators/softmax_mask_fuse_upper_triangle.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,11 +18,17 @@
1818
from paddle import _C_ops
1919
from paddle.base.layer_helper import LayerHelper
2020
from paddle.framework import in_dynamic_or_pir_mode
21+
from paddle.utils.deprecated import deprecated
2122

2223
if TYPE_CHECKING:
2324
from paddle import Tensor
2425

2526

27+
@deprecated(
28+
since="3.4.0",
29+
level=1,
30+
update_to="paddle.nn.functional.scaled_dot_product_attention",
31+
)
2632
def softmax_mask_fuse_upper_triangle(x: Tensor) -> Tensor:
2733
"""
2834
Do a masked softmax on x, which will always mask upper triangle part of x.

test/legacy_test/test_softmax_mask_fuse_upper_triangle_op.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,12 @@
2323

2424
paddle.enable_static()
2525

26+
_softmax_mask_fuse_upper_triangle_python_api = getattr(
27+
paddle.incubate.softmax_mask_fuse_upper_triangle,
28+
'__wrapped__',
29+
paddle.incubate.softmax_mask_fuse_upper_triangle,
30+
)
31+
2632

2733
def _get_softmax_upper(x, fp16=True):
2834
x_lower = np.tril(x)
@@ -44,7 +50,7 @@ def _get_softmax_upper(x, fp16=True):
4450
class TestSoftmaxMaskFuseOp(OpTest):
4551
def setUp(self):
4652
self.op_type = "fused_softmax_mask_upper_triangle"
47-
self.python_api = paddle.incubate.softmax_mask_fuse_upper_triangle
53+
self.python_api = _softmax_mask_fuse_upper_triangle_python_api
4854
x = np.random.random((1, 4, 32, 32)).astype("float16")
4955
self.inputs = {'X': x}
5056
rst = _get_softmax_upper(x)
@@ -68,7 +74,7 @@ def test_check_grad(self):
6874
class TestSoftmaxMaskFuseOp_ZeroSize(TestSoftmaxMaskFuseOp):
6975
def setUp(self):
7076
self.op_type = "fused_softmax_mask_upper_triangle"
71-
self.python_api = paddle.incubate.softmax_mask_fuse_upper_triangle
77+
self.python_api = _softmax_mask_fuse_upper_triangle_python_api
7278
x = np.random.random((1, 1, 0, 32)).astype("float16")
7379
self.inputs = {'X': x}
7480
rst = _get_softmax_upper(x)
@@ -82,7 +88,7 @@ def setUp(self):
8288
class TestSoftmaxMaskFuseOp1(OpTest):
8389
def setUp(self):
8490
self.op_type = "fused_softmax_mask_upper_triangle"
85-
self.python_api = paddle.incubate.softmax_mask_fuse_upper_triangle
91+
self.python_api = _softmax_mask_fuse_upper_triangle_python_api
8692
x = np.random.random((1, 4, 32, 32))
8793
self.inputs = {'X': x}
8894
rst = _get_softmax_upper(x)

0 commit comments

Comments (0)