diff --git a/python/paddle/tensor/ops.py b/python/paddle/tensor/ops.py
index b09daac5792355..941450d7c8b91c 100644
--- a/python/paddle/tensor/ops.py
+++ b/python/paddle/tensor/ops.py
@@ -289,7 +289,7 @@ def acosh(x, name=None):
             Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
             [0.        , 1.76274717, 2.06343699, 2.29243159])
     """
-    if in_dynamic_mode():
+    if in_dynamic_or_pir_mode():
         return _C_ops.acosh(x)
     else:
         check_variable_and_dtype(
diff --git a/test/legacy_test/test_activation_op.py b/test/legacy_test/test_activation_op.py
index 36ecbffccc1d87..a96f892c3f1bd9 100644
--- a/test/legacy_test/test_activation_op.py
+++ b/test/legacy_test/test_activation_op.py
@@ -2191,14 +2191,19 @@ def setUp(self):
     def init_shape(self):
         self.shape = [10, 12]
 
+    def test_check_output(self):
+        self.check_output(check_pir=True)
+
     def test_check_grad(self):
         if self.dtype == np.float16:
             return
         if self.dtype == np.complex64:
             # Complex64[CPU]: AssertionError: 0.012431525 not less than or equal to 0.005
-            self.check_grad(['X'], 'Out', max_relative_error=0.02)
+            self.check_grad(
+                ['X'], 'Out', max_relative_error=0.02, check_pir=True
+            )
         else:
-            self.check_grad(['X'], 'Out')
+            self.check_grad(['X'], 'Out', check_pir=True)
 
 
 class TestAcosh_Complex64(TestAcosh):
@@ -4724,7 +4729,7 @@ def test_check_grad(self):
 create_test_act_fp16_class(TestSinh)
 create_test_act_fp16_class(TestAsin)
 create_test_act_fp16_class(TestAtan)
-create_test_act_fp16_class(TestAcosh)
+create_test_act_fp16_class(TestAcosh, check_pir=True)
 create_test_act_fp16_class(TestAsinh)
 create_test_act_fp16_class(TestAtanh)
 create_test_act_fp16_class(TestRound, grad_check=False, check_pir=True)
@@ -4878,7 +4883,7 @@ def test_check_grad(self):
 create_test_act_bf16_class(TestSinh)
 create_test_act_bf16_class(TestAsin)
 create_test_act_bf16_class(TestAtan)
-create_test_act_bf16_class(TestAcosh)
+create_test_act_bf16_class(TestAcosh, check_pir=True)
 create_test_act_bf16_class(TestAsinh)
 create_test_act_bf16_class(TestAtanh)
 create_test_act_bf16_class(TestRound, grad_check=False, check_pir=True)