Adjusted CMakeFiles to support compilation for final state auto generated codes
jim19930609 committed Jan 25, 2022
1 parent 3723cab commit ca74350
Showing 6 changed files with 97 additions and 47 deletions.
@@ -1 +1,3 @@
cc_library(scale_node SRCS scale_node.cc DEPS global_utils pten pten_api grad_node_info)
+#cc_library(final_dygraph_node SRCS nodes.cc DEPS ${eager_deps})
+#add_dependencies(final_dygraph_node eager_final_state_codegen)
@@ -1 +1,3 @@
cc_library(eager_scale SRCS scale.cc DEPS pten_api pten autograd_meta scale_node)
+#cc_library(final_dygraph_function SRCS dygraph_functions.cc DEPS ${eager_deps})
+#add_dependencies(final_dygraph_function eager_final_state_codegen)
2 changes: 1 addition & 1 deletion paddle/fluid/eager/auto_code_generator/CMakeLists.txt
@@ -1,4 +1,4 @@
-add_subdirectory(final_state_generator)
+#add_subdirectory(final_state_generator)

set(EAGER_GENERETOR_DEPS ${GLOB_OP_LIB} ${GLOB_OPERATOR_DEPS} pybind proto_desc executor layer tracer engine imperative_profiler imperative_flag)

@@ -2,13 +2,14 @@ set(api_yaml_path "${PADDLE_SOURCE_DIR}/python/paddle/utils/code_gen/api.yaml")
set(backward_yaml_path "${PADDLE_SOURCE_DIR}/python/paddle/utils/code_gen/backward.yaml")
set(tmp_forwards_cc_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/forwards/tmp_dygraph_functions.cc")
set(tmp_forwards_h_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/forwards/tmp_dygraph_functions.h")
-set(tmp_nodes_cc_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/tmp_node.cc")
-set(tmp_nodes_h_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/tmp_node.h")
+set(tmp_nodes_cc_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/tmp_nodes.cc")
+set(tmp_nodes_h_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/tmp_nodes.h")
set(forwards_cc_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/forwards/dygraph_functions.cc")
set(forwards_h_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/forwards/dygraph_functions.h")
-set(nodes_cc_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/node.cc")
-set(nodes_h_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/node.h")
+set(nodes_cc_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/nodes.cc")
+set(nodes_h_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/nodes.h")

message("Final State Eager CodeGen")
add_custom_target(eager_final_state_codegen
COMMAND "${PYTHON_EXECUTABLE}" "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/auto_code_generator/final_state_generator/eager_gen.py"
"--api_yaml_path=${api_yaml_path}"
@@ -72,6 +72,16 @@ def GetConstReference(string):
    return ret


+def RemoveConstAndReference(string):
+    ret = string
+    if string.startswith("const "):
+        ret = ret[6:]
+    if string.endswith("&"):
+        ret = ret[:-1]
+
+    return ret


def GetAutoGradMetaName(string):
    return f"{string}_autograd_meta"
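
A quick check of what the new helper does (a minimal sketch; assumes RemoveConstAndReference from the hunk above is in scope):

    # "const T&" becomes "T", so the grad node can store the attribute by value.
    assert RemoveConstAndReference("const paddle::experimental::Tensor&") == "paddle::experimental::Tensor"
    assert RemoveConstAndReference("std::vector<int>") == "std::vector<int>"  # unqualified types pass through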

@@ -439,7 +449,7 @@ def GenerateNodeDeclaration(fwd_api_name, backward_fwd_input_map,
{} {} = {};
"""
        attribute_members_str += ATTRIBUTE_MEMBER_TEMPLATE.format(
-            GetConstReference(atype), saved_attr_name, default_val)
+            RemoveConstAndReference(atype), saved_attr_name, default_val)
    # End: SetAttributes & Attribute Members

    NODE_DECLARATION_TEMPLATE = """
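
Why the switch away from GetConstReference here: a declared member must own its value, so the const/& qualifiers of the yaml attribute type have to go. A small illustration with a hypothetical attribute (the name "axes_" and the default are invented; assumes the helpers above are in scope):

    atype = "const std::vector<int>&"  # hypothetical attribute type from backward.yaml
    ATTRIBUTE_MEMBER_TEMPLATE = """
  {} {} = {};
"""
    print(ATTRIBUTE_MEMBER_TEMPLATE.format(
        RemoveConstAndReference(atype), "axes_", "{}"))
    # emits:   std::vector<int> axes_ = {};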
@@ -490,53 +500,42 @@ def GenerateNodeDefinition(fwd_api_name, bwd_api_name, backward_fwd_input_map,
    for name, (_, is_fwd_input,
               grad_api_position), in backward_fwd_input_map.items():
        tensor_wrapper_name = GetSavedName(name)
-        if is_fwd_input:
-            grad_api_args[
-                grad_api_position] = f"egr::EagerUtils::SyncToPtenTensors( egr::EagerUtils::RecoverTensorWrapper(&this->{tensor_wrapper_name}, true) )"
-        else:
-            grad_api_args[
-                grad_api_position] = f"egr::EagerUtils::SyncToPtenTensors( egr::EagerUtils::RecoverTensorWrapper(&this->{tensor_wrapper_name}, false) )"
+        grad_api_args[
+            grad_api_position] = f"egr::EagerUtils::SyncToPtenTensors( egr::EagerUtils::RecoverTensorWrapper(&this->{tensor_wrapper_name}, nullptr) )"

    for _, (_, fwd_position,
            grad_api_position) in backward_grad_input_map.items():
        grad_api_args[
-            grad_api_position] = f"egr::EagerUtils::SyncToPtenTensors( *grads[{fwd_position}] )"
+            grad_api_position] = f"egr::EagerUtils::SyncToPtenTensors( grads[{fwd_position}] )"

    for name, _, _, grad_api_position in backward_attrs_list:
        saved_attribute_name = GetSavedName(name)
        grad_api_args[grad_api_position] = f"this->{saved_attribute_name}"
    grad_api_args_str = ", ".join(grad_api_args)

    # Construct grad_api returns
-    num_outputs = len(backward_grad_output_map.keys())
-    returns_list = ["" for i in range(num_outputs)]
+    num_bwd_outputs = len(backward_grad_output_map.keys())
+    returns_str = f"std::vector<std::vector<egr::EagerTensor>> returns({num_bwd_outputs});\n"
    for _, (ttype, fwd_position,
            grad_api_position) in backward_grad_output_map.items():
        # Infer Grad API Return Type
-        if num_outputs == 1:
+        if num_bwd_outputs == 1:
            # Single tensor output, return as is
            if IsPlainTensorType(ttype):
-                returns_list[0] = "{grad_api_returns}"
+                returns_str += "returns[0] = { egr::EagerUtils::CreateEagerTensorFromTensor(grad_api_returns) };\n"
            else:
                assert IsVectorTensorType(ttype)
-                returns_list[0] = "grad_api_returns"
+                returns_str += "returns[0] = egr::EagerUtils::CreateEagerTensorFromTensor(grad_api_returns);\n"
        else:
            # Rearrange output order accordingly
-            if IsPlainTensorType(ttype):
-                returns_list[
-                    fwd_position] = f"{{ grad_api_returns[{grad_api_position}] }}"
-            else:
-                assert IsVectorTensorType(ttype)
-                returns_list[
-                    fwd_position] = f"grad_api_returns[{grad_api_position}]"
-    returns_str = ", ".join(returns_list)
-    returns_str = f"{{ {returns_str} }}"
+            returns_str += f"returns[{fwd_position}] = egr::EagerUtils::CreateEagerTensorFromTensor( grad_api_returns[{grad_api_position}] );\n"
+    returns_str += f"return returns;\n"

    FUNCTION_TEMPLATE = """
std::vector<std::vector<egr::EagerTensor>> GradNode{}::operator()(const std::vector<std::vector<egr::EagerTensor>>& grads) {{
// Call grad_api function
-auto grad_api_returns = {}({});
-return {};
+auto grad_api_returns = paddle::experimental::{}({});
+{}
}}
"""

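Putting the pieces together: for a hypothetical single-output op named "scale", the template above would be instantiated roughly like this (all four arguments invented for illustration; the real ones come from the maps built earlier in this function):

    print(FUNCTION_TEMPLATE.format(
        "scale",  # class suffix: GradNodescale
        "scale_grad",  # grad api name, now qualified with paddle::experimental::
        "egr::EagerUtils::SyncToPtenTensors( grads[0] ), this->scale_",
        'std::vector<std::vector<egr::EagerTensor>> returns(1);\n'
        'returns[0] = { egr::EagerUtils::CreateEagerTensorFromTensor(grad_api_returns) };\n'
        'return returns;'))
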
@@ -566,12 +565,12 @@ def GenerateNodeCreationCodes(fwd_api_name, bwd_api_name,
    for name, (ttype, pos) in forward_inputs_position_map.items():
        input_autograd_meta_name = GetAutoGradMetaName(name)
        if IsPlainTensorType(ttype):
-            input_autograd_meta = f" egr::EagerTensor* {input_autograd_meta_name} = egr::EagerUtils::nullable_autograd_meta({name});"
+            input_autograd_meta = f" egr::AutogradMeta* {input_autograd_meta_name} = egr::EagerUtils::nullable_autograd_meta({name});"
        else:
            assert IsVectorTensorType(ttype)
            input_autograd_meta_vec_name = GetAutoGradMetaVectorName(name)
-            input_autograd_meta = f" std::vector<egr::EagerTensor*> {input_autograd_meta_vec_name} = egr::EagerUtils::nullable_autograd_meta({name});\n"
-            input_autograd_meta += f" std::vector<egr::EagerTensor*>* {input_autograd_meta_name} = &{input_autograd_meta_vec_name};"
+            input_autograd_meta = f" std::vector<egr::AutogradMeta*> {input_autograd_meta_vec_name} = egr::EagerUtils::nullable_autograd_meta({name});\n"
+            input_autograd_meta += f" std::vector<egr::AutogradMeta*>* {input_autograd_meta_name} = &{input_autograd_meta_vec_name};"

        inputs_autograd_meta_list.append(input_autograd_meta)
        compute_require_grad_args_list.append(input_autograd_meta_name)
@@ -587,19 +586,19 @@
        output_autograd_meta_vec_name = GetAutoGradMetaVectorName(name)
        if num_fwd_outputs == 1:
            if IsPlainTensorType(rtype):
-                output_autograd_meta = f" egr::EagerTensor* {output_autograd_meta_name} = egr::EagerUtils::autograd_meta(outputs);"
+                output_autograd_meta = f" egr::AutogradMeta* {output_autograd_meta_name} = egr::EagerUtils::autograd_meta(&outputs);"
            else:
                assert IsVectorTensorType(rtype)
-                output_autograd_meta = f" std::vector<egr::EagerTensor*> {output_autograd_meta_vec_name} = egr::EagerUtils::nullable_autograd_meta({outputs});\n"
-                output_autograd_meta += f" std::vector<egr::EagerTensor*>* {output_autograd_meta_name} = &{output_autograd_meta_vec_name};"
+                output_autograd_meta = f" std::vector<egr::AutogradMeta*> {output_autograd_meta_vec_name} = egr::EagerUtils::autograd_meta(&{outputs});\n"
+                output_autograd_meta += f" std::vector<egr::AutogradMeta*>* {output_autograd_meta_name} = &{output_autograd_meta_vec_name};"
        else:
            # Tuple api_result
            if IsPlainTensorType(rtype):
-                outputs_autograd_meta = f" egr::EagerTensor* {output_autograd_meta_name} = egr::EagerUtils::autograd_meta(outputs[{pos}]);"
+                outputs_autograd_meta = f" egr::AutogradMeta* {output_autograd_meta_name} = egr::EagerUtils::autograd_meta(&outputs[{pos}]);"
            else:
                assert IsVectorTensorType(rtype)
-                output_autograd_meta = f" std::vector<egr::EagerTensor*> {output_autograd_meta_vec_name} = egr::EagerUtils::nullable_autograd_meta(outputs[{pos}]);\n"
-                output_autograd_meta += f" std::vector<egr::EagerTensor*>* {output_autograd_meta_name} = &{output_autograd_meta_vec_name};"
+                output_autograd_meta = f" std::vector<egr::AutogradMeta*> {output_autograd_meta_vec_name} = egr::EagerUtils::autograd_meta(&outputs[{pos}]);\n"
+                output_autograd_meta += f" std::vector<egr::AutogradMeta*>* {output_autograd_meta_name} = &{output_autograd_meta_vec_name};"

        outputs_autograd_meta_list.append(output_autograd_meta)
        pass_stop_gradient_args_list.append(output_autograd_meta_name)
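
The net effect of this hunk: generated code now collects egr::AutogradMeta* by taking the address of the forward outputs, rather than handling egr::EagerTensor* directly. For a single plain-tensor output, the emitted line looks like this (a sketch; GetAutoGradMetaName is the helper defined near the top of this file, the output name "out" is invented):

    name = "out"  # hypothetical forward output name
    output_autograd_meta_name = GetAutoGradMetaName(name)  # -> "out_autograd_meta"
    print(f" egr::AutogradMeta* {output_autograd_meta_name} = egr::EagerUtils::autograd_meta(&outputs);")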
@@ -622,8 +621,11 @@

    # SetTensorWrappers
    set_tensor_wrappers_list = []
-    for name, (_, _, _) in backward_fwd_input_map.items():
-        set_tensor_wrappers = f" grad_node->SetTensorWrapper{name}({name});"
+    for name, (_, is_fwd_input, _) in backward_fwd_input_map.items():
+        if is_fwd_input:
+            set_tensor_wrappers = f" grad_node->SetTensorWrapper{name}({name}, true);"
+        else:
+            set_tensor_wrappers = f" grad_node->SetTensorWrapper{name}({name}, false);"
        set_tensor_wrappers_list.append(set_tensor_wrappers)
    set_tensor_wrappers_str = "\n".join(set_tensor_wrappers_list)
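
With is_fwd_input threaded through, each generated SetTensorWrapper call now carries a boolean recording whether the wrapped tensor was a forward input. A standalone sketch of the strings this loop emits (map contents invented for the example):

    backward_fwd_input_map = {"X": ("Tensor", True, 0), "Out": ("Tensor", False, 1)}
    for name, (_, is_fwd_input, _) in backward_fwd_input_map.items():
        flag = "true" if is_fwd_input else "false"
        print(f" grad_node->SetTensorWrapper{name}({name}, {flag});")
    # emits:  grad_node->SetTensorWrapperX(X, true);
    #         grad_node->SetTensorWrapperOut(Out, false);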

@@ -747,7 +749,7 @@ def GenerateForwardDefinition(fwd_api_name, bwd_api_name,
    inputs_call_args_str = ", ".join(inputs_call_list)

    # Forward Full Logic
-    forward_call_str = f"auto api_result = {fwd_api_name}({inputs_call_args_str});"
+    forward_call_str = f"auto api_result = paddle::experimental::{fwd_api_name}({inputs_call_args_str});"

    # Get return type list & outputs
    num_outputs = len(forward_outputs_position_map.keys())
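
The forward call is now fully qualified, so the generated source no longer relies on a using-declaration for the experimental API namespace. For a hypothetical op the emitted line becomes:

    fwd_api_name = "scale"  # hypothetical
    inputs_call_args_str = "X, scale, bias, bias_after_scale"
    print(f"auto api_result = paddle::experimental::{fwd_api_name}({inputs_call_args_str});")
    # => auto api_result = paddle::experimental::scale(X, scale, bias, bias_after_scale);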
@@ -814,7 +816,7 @@ def GenerateNodeCCFile(filepath, node_definition_str):
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/eager/utils.h"
#include "paddle/fluid/eager/api/utils/global_utils.h"
-#include "paddle/fluid/eager/api/generated/eager_generated/nodes/nodes.h"
+#include "paddle/fluid/eager/api/generated/eager_generated/backwards/nodes.h"
"""
    file_contents += node_definition_str
@@ -837,8 +839,8 @@

def GenerateForwardCCFile(filepath, forward_definition_str):
    file_contents = """
-#include "paddle/fluid/eager/api/generated/eager_generated/dygraph_forward_api.h"
-#include "paddle/fluid/eager/api/generated/eager_generated/nodes/nodes.h"
+#include "paddle/fluid/eager/api/generated/eager_generated/forwards/dygraph_functions.h"
+#include "paddle/fluid/eager/api/generated/eager_generated/backwards/nodes.h"
#include "paddle/fluid/eager/api/utils/global_utils.h"
#include "paddle/fluid/eager/legacy/op_runner.h"
49 changes: 46 additions & 3 deletions paddle/fluid/eager/auto_code_generator/generate_file_structures.py
@@ -15,9 +15,45 @@
import sys
import os

-if __name__ == "__main__":
-    assert len(sys.argv) == 2
-    eager_dir = sys.argv[1]

+def GenerateFileStructureForFinalDygraph(eager_dir):
+    """
+    paddle/fluid/eager
+    |- generated
+    | |- CMakeLists.txt
+    | | "add_subdirectory(forwards), add_subdirectory(backwards)"
+    |
+    | |- forwards
+    | |- "dygraph_functions.cc"
+    | |- "dygraph_functions.h"
+    |
+    | |- backwards
+    | |- "nodes.cc"
+    | |- "nodes.h"
+    """
+    # Directory Generation
+    generated_dir = os.path.join(eager_dir, "api/generated/eager_generated")
+    forwards_dir = os.path.join(generated_dir, "forwards")
+    nodes_dir = os.path.join(generated_dir, "backwards")
+    dirs = [generated_dir, forwards_dir, nodes_dir]
+    for directory in dirs:
+        if not os.path.exists(directory):
+            os.mkdir(directory)
+
+    # Empty files
+    dygraph_forward_api_h_path = os.path.join(generated_dir,
+                                              "dygraph_functions.h")
+    empty_files = [dygraph_forward_api_h_path]
+    empty_files.append(os.path.join(forwards_dir, "dygraph_functions.cc"))
+    empty_files.append(os.path.join(nodes_dir, "nodes.cc"))
+    empty_files.append(os.path.join(nodes_dir, "nodes.h"))
+
+    for path in empty_files:
+        if not os.path.exists(path):
+            open(path, 'a').close()
+
+
+def GenerateFileStructureForIntermediateDygraph(eager_dir):
    """
    paddle/fluid/eager
    |- generated
@@ -79,3 +115,10 @@

    with open(generated_level_cmakelist_path, "w") as f:
        f.write("add_subdirectory(forwards)\nadd_subdirectory(nodes)")


+if __name__ == "__main__":
+    assert len(sys.argv) == 2
+    eager_dir = sys.argv[1]
+    GenerateFileStructureForIntermediateDygraph(eager_dir)
+    GenerateFileStructureForFinalDygraph(eager_dir)
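
One detail worth noting in the new helper: open(path, 'a').close() creates an empty placeholder yet never truncates a file the code generator has already filled, so re-running the structure script is safe. A self-contained demonstration of the idiom (temporary directory used purely for illustration):

    import os
    import tempfile

    path = os.path.join(tempfile.mkdtemp(), "nodes.cc")
    open(path, 'a').close()  # first run: creates the empty placeholder
    with open(path, 'w') as f:
        f.write("// generated")  # later, codegen writes the real content
    open(path, 'a').close()  # second run: leaves existing content intact
    with open(path) as f:
        assert f.read() == "// generated"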
