From ddb654d3e32ea79097c6f49a4f3dad4dbf23f8fd Mon Sep 17 00:00:00 2001
From: chenzhiyang <1792266893@qq.com>
Date: Tue, 28 Nov 2023 09:23:28 +0000
Subject: [PATCH] fix cross_entropy_with_softmax vjp bug

The vjp code generator computed the index of each out_grads entry with
a running counter (grad_idx += 1), which is only correct when a
backward op consumes the gradients of all forward outputs, in their
original order. cross_entropy_with_softmax's backward op does not (it
takes only the loss gradient), so the counter picked the wrong slot.
Derive the index from the grad input's name instead: strip the trailing
"_grad" suffix and look the base name up in fwd_outputs_list. With the
generated vjp fixed, the softmax_with_cross_entropy grad tests are
switched to check_pir=True.
---
 .../dialect/op_generator/op_interface_gen.py  |  2 +-
 .../test_softmax_with_cross_entropy_op.py     | 24 +++++++++----------
 2 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/paddle/fluid/pir/dialect/op_generator/op_interface_gen.py b/paddle/fluid/pir/dialect/op_generator/op_interface_gen.py
index 92881b5d48523..dd2323cd7c3e9 100644
--- a/paddle/fluid/pir/dialect/op_generator/op_interface_gen.py
+++ b/paddle/fluid/pir/dialect/op_generator/op_interface_gen.py
@@ -149,7 +149,7 @@ def gen_op_vjp_str(
             index_0 = fwd_outputs_list.index(bw_input_name)
         else:
             vjp_param_name = 'out_grads'
-            grad_idx += 1
+            grad_idx = fwd_outputs_list.index(bw_input_name[:-5])
             index_0 = grad_idx
         if op_grad_info.input_optional_list[idx] == 'true':
             if input_type == 'Tensor':
diff --git a/test/legacy_test/test_softmax_with_cross_entropy_op.py b/test/legacy_test/test_softmax_with_cross_entropy_op.py
index e2d512707e57d..b0a79084ed118 100644
--- a/test/legacy_test/test_softmax_with_cross_entropy_op.py
+++ b/test/legacy_test/test_softmax_with_cross_entropy_op.py
@@ -160,11 +160,11 @@ def test_check_grad(self):
         if core.is_compiled_with_rocm():
             if self.python_api is not None:
                 self.check_grad(
-                    ["Logits"], "Loss", max_relative_error=5e-1, check_pir=False
+                    ["Logits"], "Loss", max_relative_error=5e-1, check_pir=True
                 )
             # HIP will have accuracy fail when using float32 in CPU place
             self.check_grad(
-                ["Logits"], "Loss", max_relative_error=5e-1, check_pir=False
+                ["Logits"], "Loss", max_relative_error=5e-1, check_pir=True
             )
         else:
             if self.python_api is not None:
@@ -172,10 +172,10 @@ def test_check_grad(self):
                     ["Logits"],
                     "Loss",
                     numeric_grad_delta=0.001,
-                    check_pir=False,
+                    check_pir=True,
                 )
             self.check_grad(
-                ["Logits"], "Loss", numeric_grad_delta=0.001, check_pir=False
+                ["Logits"], "Loss", numeric_grad_delta=0.001, check_pir=True
             )


@@ -517,9 +517,9 @@ def test_check_output(self):

     def test_check_grad(self):
         if self.python_api is not None:
-            self.check_grad(["Logits"], "Loss", check_pir=False)
+            self.check_grad(["Logits"], "Loss", check_pir=True)
         self.check_grad(
-            ["Logits"], "Loss", max_relative_error=0.1, check_pir=False
+            ["Logits"], "Loss", max_relative_error=0.1, check_pir=True
         )


@@ -540,10 +540,10 @@ def initParams(self):
     def test_check_grad(self):
         if self.python_api is not None:
             self.check_grad(
-                ["Logits"], "Loss", max_relative_error=0.1, check_pir=False
+                ["Logits"], "Loss", max_relative_error=0.1, check_pir=True
             )
         self.check_grad(
-            ["Logits"], "Loss", max_relative_error=0.1, check_pir=False
+            ["Logits"], "Loss", max_relative_error=0.1, check_pir=True
         )


@@ -574,15 +574,15 @@ def test_check_grad(self):
         # HIP will have accuracy fail when using float32 in CPU place
         if self.python_api is not None:
             self.check_grad(
-                ["Logits"], "Loss", max_relative_error=0.1, check_pir=False
+                ["Logits"], "Loss", max_relative_error=0.1, check_pir=True
             )
         self.check_grad(
-            ["Logits"], "Loss", max_relative_error=0.1, check_pir=False
+            ["Logits"], "Loss", max_relative_error=0.1, check_pir=True
         )
     else:
         if self.python_api is not None:
-            self.check_grad(["Logits"], "Loss", check_pir=False)
-        self.check_grad(["Logits"], "Loss", check_pir=False)
+            self.check_grad(["Logits"], "Loss", check_pir=True)
+        self.check_grad(["Logits"], "Loss", check_pir=True)


 class TestSoftmaxWithCrossEntropyOp3(TestSoftmaxWithCrossEntropyOp):
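
A minimal sketch of what the one-line generator change does, using
stand-in data. The names fwd_outputs_list and bw_grad_inputs only
mirror the generator's variables, and the grad_idx initialization is
assumed for illustration; the real gen_op_vjp_str walks op metadata.
The forward outputs below mirror cross_entropy_with_softmax, whose
backward op is assumed here to consume only loss_grad:

# Stand-in metadata: the forward op produces (softmax, loss); the vjp
# consumes only the gradient of the second output.
fwd_outputs_list = ["softmax", "loss"]
bw_grad_inputs = ["loss_grad"]

# Old behavior: a running counter over grad inputs. Correct only when
# the vjp consumes every forward output's gradient, in order.
grad_idx = -1  # initialization not shown in the hunk; -1 assumed
for bw_input_name in bw_grad_inputs:
    grad_idx += 1
    print("old index:", grad_idx)  # 0 -> softmax's slot (wrong)

# New behavior: recover the forward output's position from the grad
# input's name by stripping the "_grad" suffix (len("_grad") == 5).
for bw_input_name in bw_grad_inputs:
    grad_idx = fwd_outputs_list.index(bw_input_name[:-5])
    print("new index:", grad_idx)  # 1 -> loss's slot (correct)

Running the sketch prints old index 0 but new index 1: the counter
pointed the generated vjp at softmax's gradient slot even though the
backward op was handed loss_grad, which is exactly the mis-indexing
the one-line change above fixes.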