diff --git a/python/paddle/autograd/function.py b/python/paddle/autograd/function.py
index c796969be8a9ba..a8f1890a8637b4 100644
--- a/python/paddle/autograd/function.py
+++ b/python/paddle/autograd/function.py
@@ -12,4 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from .py_layer import PyLayerContext as FunctionCtx  # noqa: F401
+from .py_layer import (
+    PyLayerContext as FunctionCtx,  # noqa: F401
+    once_differentiable,  # noqa: F401
+)
diff --git a/test/legacy_test/test_autograd_function.py b/test/legacy_test/test_autograd_function.py
index 4b1312ff9d61ed..5a3578daca8d12 100644
--- a/test/legacy_test/test_autograd_function.py
+++ b/test/legacy_test/test_autograd_function.py
@@ -674,6 +674,11 @@ def backward(ctx, grad_a, grad_b):
         b.sum().backward()
         self.assertEqual(x.grad, paddle.ones([1], dtype="float64"))
 
+    def test_once_differentiable_compatibility(self):
+        pyLayerObj = paddle.autograd.py_layer.once_differentiable
+        functionObj = paddle.autograd.function.once_differentiable
+        self.assertEqual(pyLayerObj, functionObj)
+
 
 if __name__ == '__main__':
     unittest.main()
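
For context, a minimal sketch (not part of this patch) of how the re-exported decorator could be used from its new location. It assumes `once_differentiable` in `paddle.autograd.py_layer` follows the familiar PyTorch-style convention of being stacked under `@staticmethod` on `backward`; the `Square` layer and variable names below are illustrative only.

```python
import paddle
from paddle.autograd import PyLayer
from paddle.autograd.function import once_differentiable  # re-export added by this patch


class Square(PyLayer):
    @staticmethod
    def forward(ctx, x):
        # Save the input so backward can compute 2 * x * grad.
        ctx.save_for_backward(x)
        return x * x

    @staticmethod
    @once_differentiable  # assumed to mark backward as not differentiable again
    def backward(ctx, grad):
        (x,) = ctx.saved_tensor()
        return 2.0 * x * grad


x = paddle.ones([3], dtype="float32")
x.stop_gradient = False
y = Square.apply(x)
y.sum().backward()
print(x.grad)  # expect [2., 2., 2.] since d/dx sum(x^2) = 2x
```

The test added above only checks that `paddle.autograd.function.once_differentiable` and `paddle.autograd.py_layer.once_differentiable` resolve to the same object, so either import path should work interchangeably.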