
Commit

add debug information for build_cinn_pass and graph symbolization
thisjiang committed Oct 29, 2021
1 parent 442688a commit ced8df1
Showing 2 changed files with 56 additions and 18 deletions.
55 changes: 37 additions & 18 deletions paddle/fluid/framework/paddle2cinn/build_cinn_pass.cc
@@ -43,6 +43,7 @@ using framework::ir::Node;

using GraphNodeVec = std::vector<Node*>;
using GraphNodeSet = std::unordered_set<Node*>;
+ using GraphNodeMap = std::unordered_map<Node*, Node*>;

namespace {
int ExtractOpRole(const GraphNodeSet& cluster) {
@@ -62,11 +63,9 @@ int ExtractOpRole(const GraphNodeSet& cluster) {

// Deal with subgraph's feed input var node:
// create a new input var node and its feed op node
- void AddFeedOpAndVar(const std::unordered_set<Node*>& feed_vars,
-                      const GraphNodeSet& cluster,
-                      const std::unordered_map<Node*, Node*>& old_op2new_op,
-                      const std::unordered_map<Node*, Node*>& old_var2new_var,
-                      Graph* graph) {
+ void AddFeedOpAndVar(const GraphNodeSet& feed_vars, const GraphNodeSet& cluster,
+                      const GraphNodeMap& old_op2new_op,
+                      const GraphNodeMap& old_var2new_var, Graph* graph) {
for (auto* old_var : feed_vars) {
// create feed op
OpDesc desc;
@@ -76,6 +75,7 @@ void AddFeedOpAndVar(const std::unordered_set<Node*>& feed_vars,

// get new feed var node
auto* var = old_var2new_var.at(old_var);
VLOG(4) << "Add Feed Op before: " << var->Name();

// link feed op and feed var
IR_NODE_LINK_TO(op, var);
@@ -95,13 +95,12 @@ void AddFeedOpAndVar(const std::unordered_set<Node*>& feed_vars,
// Deal with subgraph's parameter var node:
// create a new input var node; its data will be fetched from the scope,
// so it doesn't need a feed op
- void AddParamVar(const std::unordered_set<Node*>& param_vars,
-                  const GraphNodeSet& cluster,
-                  const std::unordered_map<Node*, Node*>& old_op2new_op,
-                  const std::unordered_map<Node*, Node*>& old_var2new_var,
-                  Graph* graph) {
+ void AddParamVar(const GraphNodeSet& param_vars, const GraphNodeSet& cluster,
+                  const GraphNodeMap& old_op2new_op,
+                  const GraphNodeMap& old_var2new_var, Graph* graph) {
for (auto* old_var : param_vars) {
auto* var = old_var2new_var.at(old_var);
VLOG(4) << "Add Param Var Node: " << var->Name();

for (auto* old_op : old_var->outputs) {
if (cluster.count(old_op)) {
@@ -113,13 +112,12 @@ void AddParamVar(const std::unordered_set<Node*>& param_vars,

// Deal with subgraph's output var node:
// create a new output var node and its fetch op
- void AddOutputVar(const std::unordered_set<Node*>& output_vars,
-                   const GraphNodeSet& cluster,
-                   const std::unordered_map<Node*, Node*>& old_op2new_op,
-                   const std::unordered_map<Node*, Node*>& old_var2new_var,
-                   Graph* graph) {
+ void AddOutputVar(const GraphNodeSet& output_vars, const GraphNodeSet& cluster,
+                   const GraphNodeMap& old_op2new_op,
+                   const GraphNodeMap& old_var2new_var, Graph* graph) {
for (auto* old_var : output_vars) {
auto* var = old_var2new_var.at(old_var);
VLOG(4) << "Add Output Var Node: " << var->Name();

for (auto* old_op : old_var->inputs) {
if (cluster.count(old_op)) {
@@ -139,13 +137,13 @@ std::unique_ptr<Graph> CreateNewSubGraph(const GraphNodeSet& cluster,
// the ProgramDesc is useless, so here we pass a temporary object.
auto subgraph = std::make_unique<Graph>(framework::ProgramDesc());

- std::unordered_map<Node*, Node*> old_op2new_op;
+ GraphNodeMap old_op2new_op;
for (auto* op : cluster) {
auto sub_node = subgraph->CreateOpNode(op->Op());
old_op2new_op[op] = sub_node;
}

- std::unordered_map<Node*, Node*> old_var2new_var;
+ GraphNodeMap old_var2new_var;
for (auto* var : cluster_internals) {
PADDLE_ENFORCE_NOT_NULL(var->Var(),
platform::errors::PreconditionNotMet(
@@ -167,7 +165,7 @@ std::unique_ptr<Graph> CreateNewSubGraph(const GraphNodeSet& cluster,
}
}

- std::unordered_set<Node*> need_feed_vars;
+ GraphNodeSet need_feed_vars;
std::unordered_set<Node *> param_vars, output_vars;
// the subgraph is independent, so here we only need to link
// to the nodes in the new subgraph, and discard the links to
@@ -303,6 +301,8 @@ void AddCinnOpToGraph(const GraphNodeSet& cluster,
auto* cinn_op_node = graph->CreateOpNode(&cinn_op_desc);
// Add new links from or to the cinn launch op node
AddLinkToCinnOp(cluster_inputs, cluster_outputs, cinn_op_node);

VLOG(4) << "Add op [" << kCinnLaunchOp << "] into graph.";
}

// Removing cluster nodes and internal nodes from the Graph
@@ -346,6 +346,16 @@ void SearchAllSubgraphs(Graph* graph) {
std::vector<GraphNodeVec> clusters =
framework::ir::SubgraphDetector(graph, teller)();

+ auto cluster_debug_info = [](const GraphNodeSet& cluster) {
+   std::string res = "(";
+   for (auto* node : cluster) {
+     res.append(node->Name());
+     res.append(", ");
+   }
+   res.append(")");
+   return res;
+ };

auto* cinn_compiler = CinnCompiler::GetInstance();
for (const auto& node_vec : clusters) {
// Classify var nodes into inputs, outputs, and internals.
@@ -354,10 +364,19 @@
GraphNodeSet cluster_inputs, cluster_outputs, cluster_internals;
AnalyseClusterVariables(cluster_set, &cluster_inputs, &cluster_outputs,
&cluster_internals);

VLOG(4) << "Cluster Ops: " << cluster_debug_info(cluster_set);
VLOG(4) << "Cluster input vars: " << cluster_debug_info(cluster_inputs);
VLOG(4) << "Cluster output vars: " << cluster_debug_info(cluster_outputs);
VLOG(4) << "Cluster internal vars: "
<< cluster_debug_info(cluster_internals);

// Create a new subgraph according to the found cluster and
// save it in CinnCompiler
std::string compilation_key = cinn_compiler->AddGraph(CreateNewSubGraph(
cluster_set, cluster_internals, cluster_inputs, cluster_outputs));
VLOG(4) << "Compilation Key: " << compilation_key;

// Replace the found cluster with a new cinn op node
ReplaceSubGraphWithCinnOpNode(cluster_set, cluster_inputs, cluster_outputs,
cluster_internals, compilation_key, graph);
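As a side note, the cluster_debug_info lambda added above only joins a cluster's node names into a parenthesized, comma-separated string for the VLOG(4) lines. Below is a minimal standalone sketch of that pattern, with a hypothetical FakeNode type standing in for framework::ir::Node (the real lambda walks an unordered set, so its print order is unspecified). Seeing the VLOG(4) messages at runtime normally requires raising the glog verbosity; for Paddle builds this is usually done through the GLOG_v environment variable, which is an assumption worth verifying against the docs.

#include <iostream>
#include <string>
#include <vector>

// FakeNode is a stand-in for framework::ir::Node, used only so this
// sketch compiles on its own.
struct FakeNode {
  std::string name;
  const std::string& Name() const { return name; }
};

// Same string-building pattern as the cluster_debug_info lambda above:
// node names wrapped in parentheses and separated by ", ".
std::string ClusterDebugInfo(const std::vector<const FakeNode*>& cluster) {
  std::string res = "(";
  for (const auto* node : cluster) {
    res.append(node->Name());
    res.append(", ");
  }
  res.append(")");
  return res;
}

int main() {
  FakeNode a{"elementwise_add"}, b{"relu"};
  // Prints: (elementwise_add, relu, )
  std::cout << ClusterDebugInfo({&a, &b}) << std::endl;
  return 0;
}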
19 changes: 19 additions & 0 deletions paddle/fluid/framework/paddle2cinn/cinn_graph_symbolization.cc
@@ -59,8 +59,21 @@ FeedInfoMap CinnGraphSymbolization::GetFeedInfoMapFromInput() const {
for (auto& feed_pair : input_tensors_) {
const auto& feed_name = feed_pair.first;
const auto* tensor = feed_pair.second;
+ PADDLE_ENFORCE_NE(tensor, nullptr,
+                   platform::errors::PreconditionNotMet(
+                       "The input variable %s's tensor cannot be NULL,"
+                       "we need the variable's dtype and shape from tensor.",
+                       feed_name.c_str()));

+ VLOG(4) << "Get feed info from input: " << feed_name;
feed_map[feed_name] = utils::GetCinnFeedInfoFromTensor(*tensor);

+ PADDLE_ENFORCE_NE(
+     feed_map[feed_name].shape.size(), 0UL,
+     platform::errors::PreconditionNotMet(
+         "The input variable %s's tensor shape cannot be empty,"
+         "we need the variable's dtype and shape from tensor.",
+         feed_name.c_str()));
}
return feed_map;
}
@@ -95,6 +108,12 @@ CinnGraphSymbolization::CreateCinnScope(const FeedInfoMap& feed_map) {
auto parameter_names = GetGraphInputParameterNames();

for (const auto& param_name : parameter_names) {
+ PADDLE_ENFORCE_GT(
+     feed_map.count(param_name), 0UL,
+     platform::errors::NotFound("Cannot find parameter %s from input list,"
+                                "please add the tensor into input.",
+                                param_name.c_str()));

// if cannot find var in graph input, skip.
// the scope accepts the CINN format name, so here we need to transform
// the paddle format name to the CINN format.
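Taken together, the new checks in this file are a fail-fast validation of the symbolization inputs: every feed must carry a non-null tensor with a non-empty shape (GetFeedInfoMapFromInput), and every graph input parameter must appear in the feed map (CreateCinnScope). Below is a minimal standalone sketch of that validation pattern, using a hypothetical FeedTensor type and plain exceptions in place of Paddle's tensor type and PADDLE_ENFORCE machinery.

#include <cstdint>
#include <map>
#include <stdexcept>
#include <string>
#include <vector>

// FeedTensor is a simplified stand-in for the framework tensor type; plain
// exceptions stand in for PADDLE_ENFORCE_* so the sketch is self-contained.
struct FeedTensor {
  std::vector<int64_t> shape;
};

void ValidateFeeds(const std::map<std::string, const FeedTensor*>& feeds) {
  for (const auto& feed_pair : feeds) {
    const auto& feed_name = feed_pair.first;
    const auto* tensor = feed_pair.second;
    // Counterpart of the first PADDLE_ENFORCE_NE above: the variable's dtype
    // and shape are read from the tensor, so a null tensor is rejected early.
    if (tensor == nullptr) {
      throw std::runtime_error("The input variable " + feed_name +
                               "'s tensor cannot be NULL.");
    }
    // Counterpart of the second PADDLE_ENFORCE_NE above: an empty shape
    // leaves nothing to build the CINN feed info from.
    if (tensor->shape.empty()) {
      throw std::runtime_error("The input variable " + feed_name +
                               "'s tensor shape cannot be empty.");
    }
  }
}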

1 comment on commit ced8df1

@paddle-bot-old

Congratulations! Your pull request passed all required CI. You can now ask the reviewer(s) to approve and merge. 🎉
