diff --git a/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py b/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py index f69b5944c5603..ed21e1171c17c 100644 --- a/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py +++ b/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py @@ -1893,12 +1893,29 @@ def GenerateHigherOrderNodeCreationCode(self): False if self.composite_func_info == {} else True ) - if is_composite_grad_api and next_grad_node_creation_str != '': - next_grad_node_creation_str = f""" + if is_composite_grad_api: + if next_grad_node_creation_str != '': + next_grad_node_creation_str = f""" if (!paddle::prim::PrimCommonUtils::IsEagerPrimEnabled()) {{ {next_grad_node_creation_str} }} """ + else: + if not ( + self.grad_api_contents["backward_op"] in prim_white_list + or is_invoke_forward_api + ): + + next_grad_node_creation_str = f""" + if (!paddle::prim::PrimCommonUtils::IsEagerPrimEnabled()) {{ + if (trace_backward) {{ + PADDLE_THROW(phi::errors::Unavailable( + \"The Op {self.backward_api_name} doesn't have any grad \" + \"op. If you don't intend calculating higher order \" + \"derivatives, please set `create_graph` to False.\")); + }} + }} + """ if next_node_generator is not None: has_higher_order_node = True @@ -1918,6 +1935,7 @@ def GenerateHigherOrderNodeCreationCode(self): \"op. If you don't intend calculating higher order\" \"derivatives, please set `create_graph`to False.\")); }}""" + return ( has_higher_order_node, is_invoke_forward_api,