Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[GLCC]Part-1: Add pylayer op to Support @to_static #56108

Merged
merged 26 commits into from
Aug 21, 2023
Merged
Show file tree
Hide file tree
Changes from 8 commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
aa91de8
complete static_pylayer op
MarioLulab Aug 9, 2023
ea5798f
finish static_pylayer op context manager
MarioLulab Aug 9, 2023
a82cb65
finish single test
MarioLulab Aug 9, 2023
e239421
append import path
MarioLulab Aug 9, 2023
18501b7
maybe modify test/ir/inference
MarioLulab Aug 9, 2023
e7a0ee6
percept static_pylayer op in dy2st
MarioLulab Aug 9, 2023
3387842
pre-commit code
MarioLulab Aug 9, 2023
fabfa33
fix python bug
MarioLulab Aug 9, 2023
8e0c64a
Add base func CreateInterpreter
MarioLulab Aug 11, 2023
7ee08a4
replace forward_block and backward_block by blocks
MarioLulab Aug 11, 2023
47ddb57
remove compile dependency of static_pylayer
MarioLulab Aug 11, 2023
022a3ba
modify header file includings
MarioLulab Aug 11, 2023
69656cf
revert dy2st about pylayer
MarioLulab Aug 16, 2023
1e88901
delete debug info
MarioLulab Aug 16, 2023
1172c6e
add doc for static pylayer api
MarioLulab Aug 16, 2023
4cfe0b8
move the logic of renaming into context manager
MarioLulab Aug 16, 2023
48415aa
add check in python api
MarioLulab Aug 16, 2023
b12d59d
add check between inside_grads and outside_grads
MarioLulab Aug 16, 2023
1416b87
add test cases
MarioLulab Aug 16, 2023
821b3e6
fix bugs in api and add testcase
MarioLulab Aug 17, 2023
1ae0bc8
delete debug files
MarioLulab Aug 17, 2023
d86890d
replace "static_pylayer" with "pylayer" in cpp
MarioLulab Aug 18, 2023
aa245e8
add some notes and add import from backward.py
MarioLulab Aug 18, 2023
fb0a2f9
move helper function into control_flow_op_helper.h
MarioLulab Aug 18, 2023
a7946f3
Merge branch 'develop' of github.com:MarioLulab/Paddle into luqi/dev_…
MarioLulab Aug 18, 2023
47856d5
fix code style
MarioLulab Aug 18, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions paddle/fluid/framework/op_compatible_info.cc
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,7 @@ void OpCompatibleMap::InitOpCompatibleMap() {
OpCompatibleType::possible};
op_compatible_map_["conditional_block_infer"] = {"1.6.0",
OpCompatibleType::possible};
op_compatible_map_["static_pylayer"] = {"1.6.0", OpCompatibleType::possible};
Aurelius84 marked this conversation as resolved.
Show resolved Hide resolved
op_compatible_map_["conv2d"] = {"1.6.0", OpCompatibleType::possible};
op_compatible_map_["conv2d_transpose"] = {"1.6.0",
OpCompatibleType::possible};
Expand Down
16 changes: 11 additions & 5 deletions paddle/fluid/operators/controlflow/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,21 +3,25 @@ if(WITH_UNITY_BUILD)
# Load Unity Build rules for operators in paddle/fluid/operators/controlflow.
include(unity_build_rule.cmake)
endif()
register_operators(EXCLUDES conditional_block_op DEPS naive_executor
standalone_executor)
register_operators(EXCLUDES conditional_block_op static_pylayer_op DEPS
naive_executor standalone_executor)

cc_library(
conditional_block_op
SRCS conditional_block_op.cc
DEPS executor standalone_executor)
cc_library(
static_pylayer_op
SRCS static_pylayer_op.cc
DEPS executor standalone_executor)
Aurelius84 marked this conversation as resolved.
Show resolved Hide resolved
cc_library(
op_variant
SRCS op_variant.cc
DEPS operator proto_desc)
cc_library(
conditional_block_op_helper
SRCS conditional_block_op_helper.cc
DEPS op_variant operator conditional_block_op)
DEPS op_variant operator conditional_block_op static_pylayer_op)
cc_library(
recurrent_op_helper
SRCS recurrent_op_helper.cc
Expand All @@ -28,9 +32,11 @@ cc_library(
DEPS op_variant operator)

if(WITH_UNITY_BUILD)
target_link_libraries(paddle_operators_controlflow_unity conditional_block_op)
target_link_libraries(paddle_operators_controlflow_unity conditional_block_op
static_pylayer_op)
else()
target_link_libraries(conditional_block_infer_op conditional_block_op)
target_link_libraries(conditional_block_infer_op conditional_block_op
static_pylayer_op)
endif()

file(APPEND ${pybind_file}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,12 @@ namespace operators {

static bool IsMatchedConditionalBlockOpAndConditionalBlockGradOp(
const OpVariant &fwd_op, const OpVariant &bwd_op) {
// NOTE(MarioLulab): To avoide strip `static_pylayer_op.cc` when linker static
Aurelius84 marked this conversation as resolved.
Show resolved Hide resolved
// link libstatic_pylayer_op.a, we should reference Variable or Function
// defined in libstatic_pylayer_op.a. Remind of that this is a `Temporary`
// method, we will use `op_library` to avoide linker strip.
std::string temp = std::string(StaticPyLayerOp::kInputs);

return fwd_op.Outputs().at(ConditionalOp::kScope) ==
bwd_op.Inputs().at(ConditionalOp::kScope);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/operators/controlflow/conditional_block_op.h"
#include "paddle/fluid/operators/controlflow/op_variant.h"
#include "paddle/fluid/operators/controlflow/static_pylayer_op.h"
Aurelius84 marked this conversation as resolved.
Show resolved Hide resolved
#include "paddle/fluid/string/string_helper.h"

namespace paddle {
Expand Down
279 changes: 279 additions & 0 deletions paddle/fluid/operators/controlflow/static_pylayer_op.cc
Original file line number Diff line number Diff line change
@@ -0,0 +1,279 @@
// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/operators/controlflow/static_pylayer_op.h"

#include "paddle/fluid/framework/new_executor/standalone_executor.h"
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
#include "paddle/fluid/framework/new_executor/standalone_executor.h"
#include "paddle/fluid/framework/new_executor/interpretercore.h"

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

有道理。目前只需要依赖 interpretercore.h

#include "paddle/fluid/operators/assign_op.h"
#include "paddle/fluid/operators/controlflow/control_flow_op_helper.h"
#include "paddle/phi/core/flags.h"
#include "paddle/phi/kernels/funcs/math_function.h"

namespace paddle {
namespace operators {

// Canonical input/output/attribute names shared by the forward op, the
// grad op, and their makers below.
const char StaticPyLayerOp::kInputs[] = "Input";
const char StaticPyLayerOp::kOutputs[] = "Out";
const char StaticPyLayerOp::kScope[] = "Scope";
const char StaticPyLayerOp::kSkipEagerDeletionVars[] =
    "skip_eager_deletion_vars";

class StaticPyLayerForwardOp : public StaticPyLayerOp {
 public:
  StaticPyLayerForwardOp(const std::string &type,
                         const framework::VariableNameMap &inputs,
                         const framework::VariableNameMap &outputs,
                         const framework::AttributeMap &attrs)
      : StaticPyLayerOp(type, inputs, outputs, attrs) {}

 private:
  // Executes the attached `forward_block` inside a freshly created child
  // scope. The child scope is published through the `Scope` output so the
  // matching static_pylayer_grad op can read the forward block's
  // intermediate variables later.
  void RunImpl(const framework::Scope &scope,
               const platform::Place &dev_place) const override {
    auto *scope_var = scope.FindVar(Output(kScope));
    PADDLE_ENFORCE_NOT_NULL(
        scope_var,
        platform::errors::PreconditionNotMet(
            "Expect Scope variable to be set in static_pylayer_op, but "
            "got a null Scope variable. Please set the Scope variable."));

    // The scope list always holds exactly one scope: the child scope the
    // forward block runs in and the grad op later consumes.
    auto *scopes = scope_var->GetMutable<std::vector<framework::Scope *>>();
    scopes->resize(1);
    scopes->front() = &scope.NewScope();

    auto &cur_scope = *scopes->front();
    auto *block = Attr<framework::BlockDesc *>("forward_block");
    VLOG(3) << "StaticPyLayer forward_block block.idx = " << block->ID()
            << ", scope = " << &cur_scope;

    auto &skip_vars = Attr<std::vector<std::string>>(kSkipEagerDeletionVars);

    LOG_FIRST_N(INFO, 1)
        << "[ControlFlow][StaticPyLayer] New Executor is Running.";

    // Build the interpreter on first use or when the place changed;
    // otherwise reuse the cached one and only re-bind the new child scope.
    if (!core_ || !platform::is_same_place(core_->GetPlace(), dev_place)) {
      VLOG(10) << "[interpreterCore cache]" << core_.get();
      VLOG_IF(10, core_) << platform::is_same_place(core_->GetPlace(),
                                                    dev_place);

      framework::interpreter::ExecutionConfig execution_config;
      execution_config.create_local_scope = false;
      execution_config.used_for_control_flow_op = true;
      // Variables the grad op still needs must survive eager deletion.
      execution_config.skip_gc_vars =
          std::set<std::string>(skip_vars.begin(), skip_vars.end());

      core_.reset(new framework::InterpreterCore(
          dev_place, *block, &cur_scope, execution_config));
      VLOG(10) << "[interpreterCore] created:" << core_;
    } else {
      // NOTE: Borrowed from
      // `paddle/fluid/operators/controlflow/control_flow_op_helper.h`
      // TODO(MarioLulab): Add StaticPyLayer Helper ?
      BuildScopeForControlFlowOp(*core_, *block, &cur_scope);
      core_->reset_scope(&cur_scope);
    }

    core_->Run({}, false);
  }

  // Cached interpreter; mutable because RunImpl is const.
  mutable std::shared_ptr<framework::InterpreterCore> core_{nullptr};
};

// Shape inference for the forward op.
class StaticPyLayerForwardInferShape : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext *context) const override {
    // TODO(MarioLulab): do nothing.
    // NOTE(review): intentionally empty — output shapes are presumably
    // determined by running the forward block at execution time; confirm
    // whether static inference should be added here.
  }
};

class StaticPyLayerBackwardOp : public StaticPyLayerOp {
public:
StaticPyLayerBackwardOp(const std::string &type,
const framework::VariableNameMap &inputs,
const framework::VariableNameMap &outputs,
const framework::AttributeMap &attrs)
: StaticPyLayerOp(type, inputs, outputs, attrs) {}

private:
void RunImpl(const framework::Scope &scope,
const platform::Place &dev_place) const override {
const auto &inputs = Inputs(StaticPyLayerOp::kInputs);
const auto &outside_grads =
Outputs(framework::GradVarName(StaticPyLayerOp::kInputs));
std::vector<std::string> inside_grads;
inside_grads.reserve(inputs.size());
for (auto &in : inputs) {
inside_grads.emplace_back(framework::GradVarName(in));
}

auto *scope_var = scope.FindVar(Input(StaticPyLayerOp::kScope));
PADDLE_ENFORCE_NOT_NULL(
scope_var,
platform::errors::PreconditionNotMet(
"Expect Scope variable to be set in static_pylayer_op, but "
"got a null Scope variable. Please set the Scope variable."));
auto &scopes = scope_var->Get<std::vector<framework::Scope *>>();
PADDLE_ENFORCE_GT(
scopes.size(),
0,
platform::errors::InvalidArgument(
"Expect Scope variable contains at least 1 scope, but got: %d",
scopes.size()));
framework::Scope &cur_scope = *(scopes[0]);

// auto *block = Attr<framework::BlockDesc *>("sub_block");
auto *block = Attr<framework::BlockDesc *>("forward_block");
Aurelius84 marked this conversation as resolved.
Show resolved Hide resolved
VLOG(3) << "Static PyLayer backward block.idx = " << block->ID()
<< ", scope = " << &cur_scope;

LOG_FIRST_N(INFO, 1)
<< "[ControlFlow][StaticPyLayerBackwardOp] New Executor is Running.";
if (!core_ || !platform::is_same_place(core_->GetPlace(), dev_place)) {
VLOG(10) << "[interpreterCore cache]" << core_.get();
VLOG_IF(10, core_) << platform::is_same_place(core_->GetPlace(),
dev_place);

framework::interpreter::ExecutionConfig execution_config;
execution_config.create_local_scope = false;
execution_config.used_for_control_flow_op = true;
execution_config.skip_gc_vars =
std::set<std::string>(inside_grads.begin(), inside_grads.end());

core_.reset(new framework::InterpreterCore(
dev_place, *block, &cur_scope, execution_config));
VLOG(10) << "[interpreterCore] created:" << core_;
} else {
BuildScopeForControlFlowOp(*core_, *block, &cur_scope);
core_->reset_scope(&cur_scope);
}
core_->Run({}, false);

// NOTE: It's neccessary. The reason of associating `inside_grads` and
// `outside_grads` at runtime `RunImpl` instead of `assgin` op at block is
// that the Var name of grad_op's outputs may be changed in the
// `append_backward` function (e.g. `_addup_repetitive_outputs_`).
AssignLocalGradientToParentScope(
dev_place, cur_scope, scope, inside_grads, outside_grads, inputs);

// Release the cur_scope, otherwise memory leakage occurs.
scope.DeleteScope(&cur_scope);
return;
}

private:
mutable std::shared_ptr<framework::InterpreterCore> core_{nullptr};

private:
void AssignLocalGradientToParentScope(
Aurelius84 marked this conversation as resolved.
Show resolved Hide resolved
const platform::Place &place,
const framework::Scope &cur_scope,
const framework::Scope &parent_scope,
const std::vector<std::string> &inside_grads,
const std::vector<std::string> &outside_grads,
const std::vector<std::string> &inputs) const {
std::vector<std::string> assign_zero_outside_grads;
std::vector<std::string> assign_zero_inputs;
for (size_t i = 0; i < outside_grads.size(); ++i) {
const std::string &outside_grad_name = outside_grads[i];
const std::string &inside_grad_name = inside_grads[i];
VLOG(4) << "[assign local]"
<< "inside_grad_name = " << inside_grad_name
<< ", outside_grad_name = " << outside_grad_name;
framework::Variable *outside_var =
parent_scope.FindVar(outside_grad_name);
if (outside_var == nullptr) {
continue;
}
framework::Variable *inside_var =
cur_scope.FindLocalVar(inside_grad_name);
if (inside_var == nullptr) {
assign_zero_outside_grads.emplace_back(outside_grad_name);
assign_zero_inputs.emplace_back(inputs[i]);
continue;
}
platform::DeviceContext *dev_ctx =
platform::DeviceContextPool::Instance().Get(place);
framework::VisitVarType(*inside_var,
AssignFunctor(outside_var, *dev_ctx));
}
}
};

// Shape inference for the grad op: each input gradient takes the shape of
// the corresponding forward input.
class StaticPyLayerBackwardInferShape : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext *context) const override {
    if (!context->HasInputs(StaticPyLayerOp::kInputs)) {
      return;
    }
    if (!context->HasOutputs(
            framework::GradVarName(StaticPyLayerOp::kInputs))) {
      return;
    }
    context->SetOutputsDim(framework::GradVarName(StaticPyLayerOp::kInputs),
                           context->GetInputsDim(StaticPyLayerOp::kInputs));
  }
};

// Var-type inference for the grad op: each input gradient shares the var
// type and data type of the corresponding forward input.
class StaticPyLayerBackwardInferVarType : public framework::VarTypeInference {
 public:
  void operator()(framework::InferVarTypeContext *ctx) const override {
    auto forward_input_size = ctx->InputSize(StaticPyLayerOp::kInputs);
    auto backward_output_size =
        ctx->OutputSize(framework::GradVarName(StaticPyLayerOp::kInputs));
    // Report the actual sizes so a mismatch is diagnosable from the message.
    PADDLE_ENFORCE_EQ(forward_input_size,
                      backward_output_size,
                      platform::errors::InvalidArgument(
                          "input_size and output_size should be equal for "
                          "static_pylayer_grad_op, but received input_size "
                          "%d and output_size %d.",
                          forward_input_size,
                          backward_output_size));
    for (size_t i = 0; i < backward_output_size; ++i) {
      ctx->SyncTypeAndDataType(StaticPyLayerOp::kInputs,
                               framework::GradVarName(StaticPyLayerOp::kInputs),
                               i);
    }
  }
};

// Builds the static_pylayer_grad op description from the forward op.
template <typename T>
class StaticPyLayerBackwardMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  void Apply(GradOpPtr<T> grad_op) const override {
    grad_op->SetType("static_pylayer_grad");
    // Wire the grad op's inputs: forward inputs, output gradients, and the
    // forward op's scope (needed to read intermediate variables).
    grad_op->SetInput(StaticPyLayerOp::kInputs,
                      this->Input(StaticPyLayerOp::kInputs));
    grad_op->SetInput(framework::GradVarName(StaticPyLayerOp::kOutputs),
                      this->OutputGrad(StaticPyLayerOp::kOutputs));
    grad_op->SetInput(StaticPyLayerOp::kScope,
                      this->Output(StaticPyLayerOp::kScope));

    // Gradients of the forward inputs become the grad op's outputs.
    auto input_grad_names = this->InputGrad(StaticPyLayerOp::kInputs, false);
    grad_op->SetOutput(framework::GradVarName(StaticPyLayerOp::kInputs),
                       input_grad_names);
    // The backward block is stored under the "forward_block" attribute name
    // that the grad op reads.
    grad_op->SetBlockAttr("forward_block",
                          PADDLE_GET_CONST(framework::BlockDesc *,
                                           this->GetAttr("backward_block")));
  }
};

} // namespace operators
} // namespace paddle

namespace ops = paddle::operators;
// Forward op: runs the forward block and exposes its scope; the maker
// below generates the matching static_pylayer_grad op.
REGISTER_OPERATOR(static_pylayer,
                  ops::StaticPyLayerForwardOp,
                  ops::StaticPyLayerForwardInferShape,
                  ops::StaticPyLayerForwardOpProtoMaker,
                  ops::StaticPyLayerBackwardMaker<paddle::framework::OpDesc>);
// Grad op: runs the backward block in the forward op's scope and writes
// the input gradients back to the parent scope.
REGISTER_OPERATOR(static_pylayer_grad,
                  ops::StaticPyLayerBackwardOp,
                  ops::StaticPyLayerBackwardInferShape,
                  ops::StaticPyLayerBackwardInferVarType);
Loading