Commit b0fbc70: add test
zrr1999 committed Oct 23, 2023 (1 parent: 08832d6)

Showing 4 changed files with 15 additions and 8 deletions.

python/paddle/tensor/math.py (1 addition, 1 deletion)

@@ -5209,7 +5209,7 @@ def logit(x, eps=None, name=None):
"""
if eps is None:
eps = 0.0
if in_dynamic_mode():
if in_dynamic_or_pir_mode():
return _C_ops.logit(x, eps)
else:
check_variable_and_dtype(
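
This change swaps the eager-only in_dynamic_mode() guard for in_dynamic_or_pir_mode(), so paddle.logit takes the _C_ops fast path both in dygraph and while the experimental PIR (new IR) program is being captured; only the legacy static graph still goes through the else branch. A minimal sketch of that dispatch pattern, assuming in_dynamic_or_pir_mode is importable from paddle.framework as in the source tree around this commit (logit_sketch is a hypothetical stand-in, not the real paddle.logit):

    # Sketch of the mode-dispatch pattern used across python/paddle/tensor.
    from paddle import _C_ops
    from paddle.framework import in_dynamic_or_pir_mode  # assumed import path

    def logit_sketch(x, eps=0.0):
        if in_dynamic_or_pir_mode():
            # Dygraph execution and PIR program capture both call straight
            # into the C++ kernel binding.
            return _C_ops.logit(x, eps)
        # The real function appends a 'logit' op to the legacy static-graph
        # Program via LayerHelper here; omitted in this sketch.
        raise NotImplementedError("legacy static-graph branch omitted")
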
test/legacy_test/test_expand_op.py (3 additions, 0 deletions)

@@ -17,13 +17,16 @@
 import numpy as np
 from op_test import OpTest

+import paddle
 from paddle import base


 # Situation 1: expand_times is a list(without tensor)
 class TestExpandOpRank1(OpTest):
     def setUp(self):
         self.op_type = "expand"
+        self.python_api = paddle.expand
+        self.public_python_api = paddle.expand
         self.init_data()
         self.dtype = (
             "float32" if base.core.is_compiled_with_rocm() else "float64"
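
Setting self.python_api (and public_python_api) on the OpTest subclass points the test harness at the Python-level API that backs the legacy "expand" op, so the newer check paths can compare results through that entry point; that is my reading of the OpTest convention in test/legacy_test/op_test.py. For reference, a small runnable example of the API being registered:

    import paddle

    x = paddle.to_tensor([[1, 2, 3]])      # shape [1, 3]
    y = paddle.expand(x, shape=[2, 3])     # broadcast along the first dim
    print(y.numpy())
    # [[1 2 3]
    #  [1 2 3]]
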
test/legacy_test/test_flip.py (10 additions, 6 deletions)

@@ -100,10 +100,10 @@ def init_attrs(self):
         self.attrs = {"axis": self.axis}

     def test_check_output(self):
-        self.check_output(check_cinn=True)
+        self.check_output(check_cinn=True, check_pir=True)

     def test_check_grad(self):
-        self.check_grad(["X"], "Out", check_cinn=True)
+        self.check_grad(["X"], "Out", check_cinn=True, check_pir=True)

     def init_test_case(self):
         self.in_shape = (6, 4, 2, 3)
@@ -167,12 +167,16 @@ def test_check_output(self):
             if core.is_compiled_with_cuda():
                 place = core.CUDAPlace(0)
                 if core.is_float16_supported(place):
-                    self.check_output_with_place(place, check_cinn=True)
+                    self.check_output_with_place(
+                        place, check_cinn=True, check_pir=True
+                    )

         def test_check_grad(self):
             place = core.CUDAPlace(0)
             if core.is_float16_supported(place):
-                self.check_grad_with_place(place, ["X"], "Out", check_cinn=True)
+                self.check_grad_with_place(
+                    place, ["X"], "Out", check_cinn=True, check_pir=True
+                )

         cls_name = "{}_{}".format(parent.__name__, "FP16OP")
         TestFlipFP16.__name__ = cls_name
@@ -202,12 +206,12 @@ def init_dtype(self):
         def test_check_output(self):
             place = core.CUDAPlace(0)
             if core.is_bfloat16_supported(place):
-                self.check_output_with_place(place)
+                self.check_output_with_place(place, check_pir=True)

         def test_check_grad(self):
             place = core.CUDAPlace(0)
             if core.is_bfloat16_supported(place):
-                self.check_grad_with_place(place, ["X"], "Out")
+                self.check_grad_with_place(place, ["X"], "Out", check_pir=True)

         cls_name = "{}_{}".format(parent.__name__, "BF16OP")
         TestFlipBF16.__name__ = cls_name
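
Passing check_pir=True asks check_output/check_grad (and their *_with_place variants) to repeat the output and gradient comparisons under the new PIR executor on top of the existing legacy and CINN paths; that is my reading of the flag in the OpTest harness. The eager behavior these flip tests pin down, as a small runnable example:

    import paddle

    x = paddle.arange(6, dtype="float32").reshape([2, 3])
    # flip reverses entries along the listed axes
    print(paddle.flip(x, axis=[1]).numpy())
    # [[2. 1. 0.]
    #  [5. 4. 3.]]
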
test/legacy_test/test_logit_op.py (1 addition, 1 deletion)

@@ -58,7 +58,7 @@ def set_attrs(self):
         self.eps = 1e-8

     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_pir=True)

     def test_check_grad(self):
         self.check_grad(['X'], ['Out'], user_defined_grads=[self.x_grad])
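
The op under test is the public paddle.logit, which computes log(x / (1 - x)) after clamping x to [eps, 1 - eps] when eps is given (eps falls back to 0.0 inside the op, per the math.py diff above). A small runnable example:

    import paddle

    x = paddle.to_tensor([0.1, 0.5, 0.9])
    # eps guards the log against inputs at or outside the open interval (0, 1)
    print(paddle.logit(x, eps=1e-8).numpy())
    # approx [-2.1972246  0.  2.1972246]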
