From c220df3fb6ca72b4de954c2a4d05267303f4333c Mon Sep 17 00:00:00 2001
From: zerorains
Date: Tue, 20 Aug 2024 13:18:27 +0000
Subject: [PATCH 1/3] add dynamic shape test cases for the scale_grad,
 square_grad, transpose_grad and swiglu_grad ops

---
 python/paddle/autograd/backward_utils.py      |  4 ++
 ...t_prim_sub_graph_backward_dynamic_shape.py | 68 +++++++++++++++++++
 2 files changed, 72 insertions(+)

diff --git a/python/paddle/autograd/backward_utils.py b/python/paddle/autograd/backward_utils.py
index 92b2cb9e96c66..9db7563dbe08c 100644
--- a/python/paddle/autograd/backward_utils.py
+++ b/python/paddle/autograd/backward_utils.py
@@ -68,6 +68,10 @@
     "pd_op.max",
     "pd_op.stack",
     "pd_op.expand",
+    "pd_op.scale",
+    "pd_op.square",
+    "pd_op.transpose",
+    "pd_op.swiglu",
 ]

diff --git a/test/prim/pir_prim/test_prim_sub_graph_backward_dynamic_shape.py b/test/prim/pir_prim/test_prim_sub_graph_backward_dynamic_shape.py
index 788602e081755..91534491f960a 100644
--- a/test/prim/pir_prim/test_prim_sub_graph_backward_dynamic_shape.py
+++ b/test/prim/pir_prim/test_prim_sub_graph_backward_dynamic_shape.py
@@ -217,6 +217,22 @@ def stack_net3(x):
     return paddle.stack(x, axis=0)


+def scale_net(x):
+    return paddle.scale(x, scale=-2.3)
+
+
+def square_net(x):
+    return paddle.square(x)
+
+
+def transpose_net(x):
+    return paddle.transpose(x, perm=[0, 3, 1, 2])
+
+
+def swiglu_net(x, y):
+    return paddle.incubate.nn.functional.swiglu(x, y)
+
+
 def apply_to_static(net, use_cinn, input_spec=None):
     build_strategy = paddle.static.BuildStrategy()
     build_strategy.build_cinn_pass = use_cinn
@@ -2214,5 +2230,57 @@ def setUp(self):
         self.tol = 1e-6


+class TestPrimScaleWithGrad(TestPrimBaseWithGrad):
+    def setUp(self):
+        np.random.seed(2023)
+        self.dtype = "float32"
+        self.x_shape = [20, 30, 70]
+        self.init_x_shape = [None, None, 70]
+        self.x = np.random.random(self.x_shape).astype(self.dtype)
+        self.net = scale_net
+        self.enable_cinn = False
+        self.tol = 1e-6
+
+
+class TestPrimSquareWithGrad(TestPrimBaseWithGrad):
+    def setUp(self):
+        np.random.seed(2023)
+        self.dtype = "float32"
+        self.x_shape = [20, 30, 70]
+        self.init_x_shape = [None, None, 70]
+        self.x = np.random.random(self.x_shape).astype(self.dtype)
+        self.net = square_net
+        self.enable_cinn = False
+        self.tol = 1e-6
+
+
+class TestPrimTransposeWithGrad(TestPrimBaseWithGrad):
+    def setUp(self):
+        np.random.seed(2023)
+        self.dtype = "float32"
+        self.x_shape = [20, 30, 50, 70]
+        self.init_x_shape = [None, None, None, 70]
+        self.x = np.random.random(self.x_shape).astype(self.dtype)
+        self.net = transpose_net
+        self.enable_cinn = False
+        self.tol = 1e-6
+
+
+class TestPrimSwigluWithGrad(TestPrimBaseOneGradTwoInputs):
+    def setUp(self):
+        np.random.seed(2023)
+        self.dtype = "float32"
+        self.y_shape = [30, 200, 40]
+        self.init_y_shape = [None, None, 40]
+        self.x_shape = [30, 200, 40]
+        self.init_x_shape = [None, None, 40]
+        self.x = np.random.random(self.x_shape).astype(self.dtype)
+        self.y = np.random.random(self.y_shape).astype(self.dtype)
+        self.net = swiglu_net
+        self.enable_cinn = False
+        self.tol = 1e-5
+        self.y_without_grad = True
+
+
 if __name__ == "__main__":
     unittest.main()
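A note on the swiglu case above, for reviewers unfamiliar with the op: the sketch below checks the two-input form against silu(x) * y, the usual SwiGLU definition. The reference expression is our own assumption for illustration, not part of the test harness.

    import numpy as np
    import paddle

    # Two-input swiglu: assumed to compute silu(x) * y.
    x = paddle.randn([30, 200, 40])
    y = paddle.randn([30, 200, 40])
    out = paddle.incubate.nn.functional.swiglu(x, y)
    ref = paddle.nn.functional.silu(x) * y
    np.testing.assert_allclose(out.numpy(), ref.numpy(), rtol=1e-5)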
From 7b5659c97d59716f28cc85aa122cf9b4923af8b2 Mon Sep 17 00:00:00 2001
From: zerorains
Date: Wed, 21 Aug 2024 03:07:28 +0000
Subject: [PATCH 2/3] add a test case for single-input swiglu

---
 ...t_prim_sub_graph_backward_dynamic_shape.py | 22 ++++++++++++++++---
 1 file changed, 19 insertions(+), 3 deletions(-)

diff --git a/test/prim/pir_prim/test_prim_sub_graph_backward_dynamic_shape.py b/test/prim/pir_prim/test_prim_sub_graph_backward_dynamic_shape.py
index 91534491f960a..fdc561ee9104c 100644
--- a/test/prim/pir_prim/test_prim_sub_graph_backward_dynamic_shape.py
+++ b/test/prim/pir_prim/test_prim_sub_graph_backward_dynamic_shape.py
@@ -229,10 +229,14 @@ def transpose_net(x):
     return paddle.transpose(x, perm=[0, 3, 1, 2])


-def swiglu_net(x, y):
+def swiglu_net1(x, y):
     return paddle.incubate.nn.functional.swiglu(x, y)


+def swiglu_net2(x):
+    return paddle.incubate.nn.functional.swiglu(x)
+
+
 def apply_to_static(net, use_cinn, input_spec=None):
     build_strategy = paddle.static.BuildStrategy()
     build_strategy.build_cinn_pass = use_cinn
@@ -2266,7 +2270,7 @@ def setUp(self):
         self.tol = 1e-6


-class TestPrimSwigluWithGrad(TestPrimBaseOneGradTwoInputs):
+class TestPrimSwigluWithGrad1(TestPrimBaseOneGradTwoInputs):
     def setUp(self):
         np.random.seed(2023)
         self.dtype = "float32"
@@ -2276,11 +2280,23 @@ def setUp(self):
         self.init_x_shape = [None, None, 40]
         self.x = np.random.random(self.x_shape).astype(self.dtype)
         self.y = np.random.random(self.y_shape).astype(self.dtype)
-        self.net = swiglu_net
+        self.net = swiglu_net1
         self.enable_cinn = False
         self.tol = 1e-5
         self.y_without_grad = True


+class TestPrimSwigluWithGrad2(TestPrimBaseWithGrad):
+    def setUp(self):
+        np.random.seed(2023)
+        self.dtype = "float32"
+        self.x_shape = [20, 30, 50, 70]
+        self.init_x_shape = [None, None, None, 70]
+        self.x = np.random.random(self.x_shape).astype(self.dtype)
+        self.net = swiglu_net2
+        self.enable_cinn = False
+        self.tol = 1e-6
+
+
 if __name__ == "__main__":
     unittest.main()
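The swiglu_net2 path added above exercises the single-input overload. A hedged sketch of how it is assumed to relate to the two-input form — splitting the input in half along the last axis, which is why TestPrimSwigluWithGrad2 uses an even trailing dimension (70):

    import numpy as np
    import paddle

    # Single-input swiglu: assumed equivalent to splitting x in two
    # along the last axis and applying the two-input form.
    x = paddle.randn([20, 30, 50, 70])
    out = paddle.incubate.nn.functional.swiglu(x)
    x1, x2 = paddle.chunk(x, chunks=2, axis=-1)
    ref = paddle.incubate.nn.functional.swiglu(x1, x2)
    np.testing.assert_allclose(out.numpy(), ref.numpy(), rtol=1e-5)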
From 0d592b846a87066916e870ea437dbd7b5f61ed27 Mon Sep 17 00:00:00 2001
From: zerorains
Date: Mon, 26 Aug 2024 09:38:11 +0000
Subject: [PATCH 3/3] add the missing unittest.main() entry points

---
 .../test_prim_sub_graph_abcde_backward_dynamic_shape.py | 4 ++++
 .../test_prim_sub_graph_fghij_backward_dynamic_shape.py | 5 +++++
 .../test_prim_sub_graph_klmno_backward_dynamic_shape.py | 6 ++++++
 .../test_prim_sub_graph_pqrst_backward_dynamic_shape.py | 4 ++++
 4 files changed, 19 insertions(+)

diff --git a/test/prim/pir_prim/test_prim_sub_graph_abcde_backward_dynamic_shape.py b/test/prim/pir_prim/test_prim_sub_graph_abcde_backward_dynamic_shape.py
index b94fc1c90ca50..2827d255ded69 100644
--- a/test/prim/pir_prim/test_prim_sub_graph_abcde_backward_dynamic_shape.py
+++ b/test/prim/pir_prim/test_prim_sub_graph_abcde_backward_dynamic_shape.py
@@ -730,3 +730,7 @@ def setUp(self):
         self.net = expand_net
         self.enable_cinn = False
         self.tol = 1e-6
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/test/prim/pir_prim/test_prim_sub_graph_fghij_backward_dynamic_shape.py b/test/prim/pir_prim/test_prim_sub_graph_fghij_backward_dynamic_shape.py
index 87ea710578fca..55ff82b9a819c 100644
--- a/test/prim/pir_prim/test_prim_sub_graph_fghij_backward_dynamic_shape.py
+++ b/test/prim/pir_prim/test_prim_sub_graph_fghij_backward_dynamic_shape.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import unittest
 import numpy as np
 from test_prim_sub_graph_backward_dynamic_shape import TestPrimBaseWithGrad

@@ -144,3 +145,7 @@ def setUp(self):
         self.net = hardswish_net
         self.enable_cinn = False
         self.tol = 1e-6
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/test/prim/pir_prim/test_prim_sub_graph_klmno_backward_dynamic_shape.py b/test/prim/pir_prim/test_prim_sub_graph_klmno_backward_dynamic_shape.py
index 83822bf8c843f..00bc44498a3f0 100644
--- a/test/prim/pir_prim/test_prim_sub_graph_klmno_backward_dynamic_shape.py
+++ b/test/prim/pir_prim/test_prim_sub_graph_klmno_backward_dynamic_shape.py
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import unittest
+
 import numpy as np
 from test_prim_sub_graph_backward_dynamic_shape import (
     TestPrimBaseWithGrad,
@@ -627,3 +629,7 @@ def setUp(self):
         self.net = multiply_net
         self.enable_cinn = False
         self.tol = 1e-5
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/test/prim/pir_prim/test_prim_sub_graph_pqrst_backward_dynamic_shape.py b/test/prim/pir_prim/test_prim_sub_graph_pqrst_backward_dynamic_shape.py
index 0bdd54a44c63a..bc3a5ab5172be 100644
--- a/test/prim/pir_prim/test_prim_sub_graph_pqrst_backward_dynamic_shape.py
+++ b/test/prim/pir_prim/test_prim_sub_graph_pqrst_backward_dynamic_shape.py
@@ -756,3 +756,7 @@ def setUp(self):
         self.net = transpose_net
         self.enable_cinn = False
         self.tol = 1e-6
+
+
+if __name__ == "__main__":
+    unittest.main()
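For readers unfamiliar with the harness: the init_*_shape lists with None entries mark dimensions as dynamic. A rough sketch of that pattern, assuming the shared apply_to_static helper wires the spec into paddle.jit.to_static roughly like this (names and defaults here are illustrative, not the helper's exact code):

    import paddle

    # None marks a dynamic dimension, matching init_x_shape = [None, None, 70].
    spec = paddle.static.InputSpec(shape=[None, None, 70], dtype="float32")
    build_strategy = paddle.static.BuildStrategy()
    build_strategy.build_cinn_pass = False  # enable_cinn = False in these tests
    static_fn = paddle.jit.to_static(
        scale_net,
        input_spec=[spec],
        build_strategy=build_strategy,
        full_graph=True,
    )
    out = static_fn(paddle.randn([20, 30, 70]))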