Skip to content

Commit

Permalink
normalize yaml backward op label (#46028)
Browse files Browse the repository at this point in the history
  • Loading branch information
chenwhql authored Sep 14, 2022
1 parent 6bd2762 commit 6891a4f
Show file tree
Hide file tree
Showing 8 changed files with 322 additions and 323 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -83,10 +83,10 @@ def ReadBwdFile(filepath):
ret = {}
if contents is not None:
for content in contents:
assert 'backward_api' in content.keys(), AssertMessage(
'backward_api', content.keys())
if 'backward_api' in content.keys():
api_name = content['backward_api']
assert 'backward_op' in content.keys(), AssertMessage(
'backward_op', content.keys())
if 'backward_op' in content.keys():
api_name = content['backward_op']

ret[api_name] = content
f.close()
Expand Down
5 changes: 2 additions & 3 deletions paddle/fluid/eager/auto_code_generator/generator/eager_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -1485,7 +1485,7 @@ def GenerateHigherOrderNodeCreationCode(self):
if next_grad_api_contents:
# Fake forward_api_contents and backward_api_contents
forward_api_contents = grad_api_contents
forward_api_contents['op'] = forward_api_contents['backward_api']
forward_api_contents['op'] = forward_api_contents['backward_op']
backward_api_contents = next_grad_api_contents

next_node_generator = DygraphFunctionGeneratorBase(
Expand Down Expand Up @@ -1959,8 +1959,7 @@ def GenerateCode(self):
forward_api_contents = backward_api_contents

# Fake forward_api_content
forward_api_contents['op'] = forward_api_contents[
'backward_api']
forward_api_contents['op'] = forward_api_contents['backward_op']
backward_api_contents = next_grad_api_contents

if len(namespace) > 0:
Expand Down
42 changes: 21 additions & 21 deletions paddle/phi/api/yaml/backward.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
- backward_api : atan2_grad
- backward_op : atan2_grad
forward : atan2 (Tensor x, Tensor y) -> Tensor(out)
args : (Tensor x, Tensor y, Tensor out_grad)
output : Tensor(x_grad), Tensor(y_grad)
Expand All @@ -8,7 +8,7 @@
kernel :
func : atan2_grad

- backward_api : cholesky_grad
- backward_op : cholesky_grad
forward : cholesky (Tensor x, bool upper) -> Tensor(out)
args : (Tensor out, Tensor out_grad, bool upper)
output : Tensor(x_grad)
Expand All @@ -18,7 +18,7 @@
kernel :
func : cholesky_grad

- backward_api : cholesky_solve_grad
- backward_op : cholesky_solve_grad
forward : cholesky_solve (Tensor x, Tensor y, bool upper) -> Tensor(out)
args : (Tensor x, Tensor y, Tensor out, Tensor out_grad, bool upper)
output : Tensor(x_grad), Tensor(y_grad)
Expand All @@ -28,7 +28,7 @@
kernel :
func : cholesky_solve_grad

- backward_api : cross_grad
- backward_op : cross_grad
forward : cross (Tensor x, Tensor y, int axis = 9) -> Tensor(out)
args : (Tensor x, Tensor y, Tensor out_grad, int axis)
output : Tensor(x_grad), Tensor(y_grad)
Expand All @@ -39,7 +39,7 @@
func : cross_grad
data_type : out_grad

- backward_api : diag_grad
- backward_op : diag_grad
forward : diag (Tensor x, int offset, float padding_value) -> Tensor(out)
args : (Tensor x, Tensor out_grad, int offset)
output : Tensor(x_grad)
Expand All @@ -51,7 +51,7 @@
data_type : out_grad
no_need_buffer : x

- backward_api : diagonal_grad
- backward_op : diagonal_grad
forward : diagonal (Tensor x, int offset, int axis1, int axis2) -> Tensor(out)
args : (Tensor x, Tensor out_grad, int offset = 0, int axis1 = 0, int axis2 = 1)
output : Tensor(x_grad)
Expand All @@ -63,7 +63,7 @@
data_type : out_grad
no_need_buffer : x

- backward_api : digamma_grad
- backward_op : digamma_grad
forward : digamma (Tensor x) -> Tensor(out)
args : (Tensor x, Tensor out_grad)
output : Tensor(x_grad)
Expand All @@ -73,7 +73,7 @@
kernel :
func : digamma_grad

- backward_api : dist_grad
- backward_op : dist_grad
forward : dist (Tensor x, Tensor y, float p) -> Tensor(out)
args : (Tensor x, Tensor y, Tensor out, Tensor out_grad, float p)
output : Tensor(x_grad), Tensor(y_grad)
Expand All @@ -83,7 +83,7 @@
kernel :
func : dist_grad

- backward_api : dot_grad
- backward_op : dot_grad
forward : dot (Tensor x, Tensor y) -> Tensor(out)
args : (Tensor x, Tensor y, Tensor out_grad)
output : Tensor(x_grad), Tensor(y_grad)
Expand All @@ -94,7 +94,7 @@
func : dot_grad
data_type : out_grad

- backward_api : erf_grad
- backward_op : erf_grad
forward : erf (Tensor x) -> Tensor(out)
args : (Tensor x, Tensor out_grad)
output : Tensor(x_grad)
Expand All @@ -105,7 +105,7 @@
func : erf_grad
data_type : out_grad

- backward_api : erfinv_grad
- backward_op : erfinv_grad
forward : erfinv (Tensor x) -> Tensor(out)
args : (Tensor out, Tensor out_grad)
output : Tensor(x_grad)
Expand All @@ -115,7 +115,7 @@
kernel :
func : erfinv_grad

- backward_api : fft_c2c_grad
- backward_op : fft_c2c_grad
forward: fft_c2c(Tensor x, int64_t[] axes, str normalization, bool forward) -> Tensor(out)
args : (Tensor out_grad, int64_t[] axes, str normalization, bool forward)
output: Tensor(x_grad)
Expand All @@ -125,7 +125,7 @@
kernel :
func : fft_c2c_grad

- backward_api : fft_c2r_grad
- backward_op : fft_c2r_grad
forward: fft_c2r(Tensor x, int64_t[] axes, str normalization, bool forward, int64_t last_dim_size) -> Tensor(out)
args : (Tensor out_grad, int64_t[] axes, str normalization, bool forward, int64_t last_dim_size)
output: Tensor(x_grad)
Expand All @@ -135,7 +135,7 @@
func : fft_c2r_grad
data_type: out_grad

- backward_api : fft_r2c_grad
- backward_op : fft_r2c_grad
forward: fft_r2c(Tensor x, int64_t[] axes, str normalization, bool forward, bool onesided) -> Tensor(out)
args : (Tensor x, Tensor out_grad, int64_t[] axes, str normalization, bool forward, bool onesided)
output: Tensor(x_grad)
Expand All @@ -147,7 +147,7 @@
data_type: out_grad
no_need_buffer: x

- backward_api : graph_send_uv_grad
- backward_op : graph_send_uv_grad
forward : graph_send_uv (Tensor x, Tensor y, Tensor src_index, Tensor dst_index, str message_op = "ADD") -> Tensor(out)
args: (Tensor x, Tensor y, Tensor src_index, Tensor dst_index, Tensor out_grad, str message_op = "ADD")
output : Tensor(x_grad), Tensor(y_grad)
Expand All @@ -158,7 +158,7 @@
func : graph_send_uv_grad
data_type : x

- backward_api : lgamma_grad
- backward_op : lgamma_grad
forward : lgamma(Tensor x) -> Tensor(out)
args : (Tensor x, Tensor out_grad)
output : Tensor(x_grad)
Expand All @@ -168,7 +168,7 @@
kernel :
func : lgamma_grad

- backward_api : mv_grad
- backward_op : mv_grad
forward : mv (Tensor x, Tensor vec) -> Tensor(out)
args : (Tensor x, Tensor vec, Tensor out_grad)
output : Tensor(x_grad), Tensor(vec_grad)
Expand All @@ -178,7 +178,7 @@
kernel :
func : mv_grad

- backward_api : poisson_grad
- backward_op : poisson_grad
forward : poisson (Tensor x) -> Tensor(out)
args : (Tensor out_grad)
output : Tensor(x_grad)
Expand All @@ -188,7 +188,7 @@
kernel :
func : poisson_grad

- backward_api : solve_grad
- backward_op : solve_grad
forward : solve (Tensor x, Tensor y) -> Tensor(out)
args : (Tensor x, Tensor y, Tensor out, Tensor out_grad)
output : Tensor(x_grad), Tensor(y_grad)
Expand All @@ -198,7 +198,7 @@
kernel :
func : solve_grad

- backward_api : trace_grad
- backward_op : trace_grad
forward : trace (Tensor x, int offset, int axis1, int axis2) -> Tensor(out)
args : (Tensor x, Tensor out_grad, int offset, int axis1, int axis2)
output : Tensor(x_grad)
Expand All @@ -210,7 +210,7 @@
data_type : out_grad
no_need_buffer : x

- backward_api : trunc_grad
- backward_op : trunc_grad
forward : trunc (Tensor x) -> Tensor(out)
args : (Tensor out_grad)
output : Tensor(x_grad)
Expand Down
2 changes: 1 addition & 1 deletion paddle/phi/api/yaml/generator/backward_api_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def __init__(self, backward_item_yaml):
self.no_need_buffer = self.parse_no_need_buffer(backward_item_yaml)

def get_api_name(self, api_item_yaml):
    """Return the backward op name declared in one backward YAML entry.

    Args:
        api_item_yaml: parsed YAML dict for a single backward-op entry;
            expected to carry the 'backward_op' key (the label this commit
            renamed from 'backward_api').

    Returns:
        The op name string stored under 'backward_op'.

    Raises:
        KeyError: if the entry lacks a 'backward_op' key.
    """
    # The diff flattening left a stale, unreachable
    # `return api_item_yaml['backward_api']` above this line; only the
    # post-rename 'backward_op' lookup is the intended behavior.
    return api_item_yaml['backward_op']

def parse_forward_config(self, forward_config):
# api_name (const Tensor& input, ... , int attr, ...) -> Tensor(out)
Expand Down
2 changes: 1 addition & 1 deletion paddle/phi/api/yaml/generator/parse_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ def main(api_yaml_path, output_path, backward):
apis = []
else:
apis = [
parse_api_entry(api, "backward_api" if backward else "op")
parse_api_entry(api, "backward_op" if backward else "op")
for api in apis
]

Expand Down
2 changes: 1 addition & 1 deletion paddle/phi/api/yaml/generator/parse_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -334,7 +334,7 @@ def parse_api_entry(api_entry: Dict[str, Any], name_field="op"):
api["backward"] = backward

# forward for backward_apis
is_backward_api = name_field == "backward_api"
is_backward_api = name_field == "backward_op"
if is_backward_api:
if "forward" in api_entry:
forward = parse_forward(api_name, api_entry["forward"])
Expand Down
Loading

0 comments on commit 6891a4f

Please sign in to comment.