Customizable Python Layer in Dygraph #32130
@@ -0,0 +1,172 @@
// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <string>
#include <vector>
#include "paddle/fluid/imperative/layer.h"
#include "paddle/fluid/imperative/tracer.h"

#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/type_defs.h"
#include "paddle/fluid/operators/py_layer_op.h"

namespace paddle {
namespace imperative {

namespace py = ::pybind11;

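// Returns true if any input VarBase still requires a gradient; if so, the
// outputs' stop_gradient flags are overwritten so autograd keeps tracking them.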
bool RequiredGrad(const NameVarBaseMap& ins, const NameVarBaseMap& outs) {
  for (const auto& name_pair : ins) {
    for (const auto& var_base : name_pair.second) {
      if (!var_base->OverridedStopGradient()) {
        PassStopGradient(outs, var_base->OverridedStopGradient());
        return true;
      }
    }
  }
  return false;
}

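// Builds the backward node for the custom op: PyLayerGradOpMaker keeps the
// Python context so the user-defined `backward` can be called later, and every
// grad op gets a unique id and the forward op's place.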
std::shared_ptr<GradOpNode> CreateGradOpNode(
    const std::string& type, const NameVarBaseMap& ins,
    const NameVarBaseMap& outs, const framework::AttributeMap& attrs,
    const platform::Place& place,
    const std::map<std::string, std::string>& inplace_map,
    const std::shared_ptr<operators::PyLayerContext>& py_context) {
  operators::PyLayerGradOpMaker<paddle::imperative::OpBase> maker(
      type, ins, outs, attrs, inplace_map);

  maker.SetPyLayerContext(py_context);
  auto grad_node = maker();
  if (grad_node && !grad_node->empty()) {
    for (auto& grad_op : *grad_node) {
      grad_op.SetId(OpBase::GenerateUniqueId());
      grad_op.SetPlace(place);
      ClearNoNeedBufferInputs(&grad_op);
    }
    return grad_node;
  } else {
    return nullptr;
  }
}

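// Entry point used when `PyLayer.apply(...)` runs in dygraph mode: it calls the
// user-defined `forward`, collects the Tensor (VarBase) inputs and outputs, and
// registers a `py_layer` grad node whenever any input requires a gradient.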
py::object PyLayerApply(const platform::Place& place, const py::object& cls,
                        const py::args args, const py::kwargs kwargs) {
  auto bk_function = cls.attr("_backward_function");
  auto context = bk_function();
  auto forward = cls.attr("forward");

  auto result_forward = forward(context, *args, **kwargs);
  std::shared_ptr<operators::PyLayerContext> py_layer_ctx =
      std::make_shared<operators::PyLayerContext>(context.release().ptr());
  // make inputs to varbase
  std::vector<std::shared_ptr<imperative::VarBase>> input_vars;
  // process args, `input_vars` only collect `imperative::VarBase`
  if (!args.empty()) {
    for (auto ptr = args.begin(); ptr != args.end(); ptr++) {
      try {
        if (Py_None != ptr->ptr()) {
          auto a = ptr->cast<std::shared_ptr<VarBase>>();
          input_vars.push_back(a);
        }
      } catch (py::cast_error& err) {
        // Only collect Tensor type in 'args' and pass them to backward. Ignore
        // other types of input temporarily.
      }
    }
  }
  // process kwargs, only collect `imperative::VarBase`
  if (!kwargs.empty()) {
    for (auto ptr = kwargs.begin(); ptr != kwargs.end(); ptr++) {
      try {
        if (Py_None != ptr->second.ptr()) {
          auto a = ptr->second.cast<std::shared_ptr<VarBase>>();
          input_vars.push_back(a);
        }
      } catch (py::cast_error&) {
        // Only collect Tensor type in 'kwargs' and pass them to backward.
        // Ignore other types of input temporarily.
      }
    }
  }
  NameVarBaseMap ins = {{"X", input_vars}};

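  // Collect the forward outputs: `forward` may return a single Tensor or a
  // tuple/list of Tensors; `None` entries and non-Tensor results raise an
  // error.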
  std::vector<std::shared_ptr<imperative::VarBase>> output_vars;
  if (PyTuple_Check(result_forward.ptr()) ||
      PyList_Check(result_forward.ptr())) {
    auto tuple_result = result_forward.cast<py::tuple>();
    for (size_t i = 0; i < tuple_result.size(); i++) {
      if (Py_None != tuple_result[i].ptr()) {
        try {
          auto temp_out =
              tuple_result[i].cast<std::shared_ptr<imperative::VarBase>>();
          output_vars.push_back(temp_out);
        } catch (py::cast_error&) {
          PADDLE_THROW(platform::errors::Unimplemented(
              "The output of `PyLayer.forward` should be `Tensor`."));
        }
      } else {
        PADDLE_THROW(platform::errors::Unimplemented(
            "The output of `PyLayer.forward` can not be `None`."));
      }
    }
  } else {
    if (Py_None != result_forward.ptr()) {
      try {
        auto temp_out =
            result_forward.cast<std::shared_ptr<imperative::VarBase>>();
        output_vars.push_back(temp_out);
      } catch (py::cast_error&) {
        PADDLE_THROW(platform::errors::Unimplemented(
            "The output of `PyLayer.forward` should be `Tensor`."));
      }
    } else {
      PADDLE_THROW(platform::errors::Unimplemented(
          "The output of `PyLayer.forward` can not be `None`."));
    }
  }

  NameVarBaseMap outs = {{"Out", output_vars}};

  if (RequiredGrad(ins, outs)) {
    std::map<std::string, std::string> inplace_map{};
    bool if_inplace = false;
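    // If any forward output shares its name with an input, the op wrote to an
    // input in place, so the grad node is created with an X -> Out in-place
    // mapping.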
    for (auto temp_ins : input_vars) {
      if (if_inplace) {
        break;
      }
      for (auto temp_outs : output_vars) {
        if (temp_ins->Name() == temp_outs->Name()) {
          if_inplace = true;
          break;
        }
      }
    }
    if (if_inplace) {
      inplace_map["X"] = "Out";
    }

    CreateGradOpNode("py_layer", ins, outs, {{}}, place, inplace_map,
                     py_layer_ctx);
  } else {
    VLOG(3) << "No Grad to track for Op: py_layer_op";
  }

  return result_forward;
}

}  // namespace imperative
}  // namespace paddle
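For context, the snippet below is a minimal usage sketch (illustrative only, not part of this diff) of how the PyLayerApply path above is driven from Python. It assumes the paddle.autograd.PyLayer interface (forward/backward static methods, ctx.save_for_backward, ctx.saved_tensor, and apply) that this PR works towards, so exact names may differ from the code in this changeset.

# Minimal sketch of a custom PyLayer in dygraph mode (assumed API, not from
# this diff).
import paddle
from paddle.autograd import PyLayer

class CustomTanh(PyLayer):
    @staticmethod
    def forward(ctx, x):
        y = paddle.tanh(x)
        # Tensors saved here are handed back to backward() through ctx.
        ctx.save_for_backward(y)
        return y

    @staticmethod
    def backward(ctx, dy):
        y, = ctx.saved_tensor()
        # d tanh(x)/dx = 1 - tanh(x)^2
        return dy * (1 - paddle.square(y))

x = paddle.randn([3, 4])
x.stop_gradient = False   # makes RequiredGrad() above return true
y = CustomTanh.apply(x)   # routed through PyLayerApply
y.sum().backward()
print(x.grad)

The remaining hunks declare PassStopGradient, which RequiredGrad above relies on, and add a VariableWrapper constructor that takes an existing framework::Variable.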
@@ -130,5 +130,7 @@ void IncreaseVarbaseReferenceCountUntilCopyComplete(
    const std::shared_ptr<imperative::VarBase>& var,
    const platform::Place& place);

void PassStopGradient(const NameVarBaseMap& outs, bool generate_grad);
Review comment: Add a blank line before this.
Reply: done, thx.

}  // namespace imperative
}  // namespace paddle
@@ -38,6 +38,9 @@ class VariableWrapper {

  explicit VariableWrapper(const std::string& name) : name_(name) {}

  VariableWrapper(const std::string& name, const framework::Variable& variable)
Review comment: Same as above; add blank lines before and after function declarations or implementations so the code sections are easier to tell apart.
Reply: done, thx.
      : var_(variable), name_(name) {}

  ~VariableWrapper() { VLOG(10) << "Destruct VariableWrapper: " << Name(); }

  const framework::Variable& Var() const { return var_; }
Review comment: add blank line before and after this statement?
Reply: done, thx.