Commit 484a2da5 authored by Yu Yang, committed by GitHub

Merge pull request #3587 from reyoung/feature/extract_op_info_into_op_info.cc

Feature/extract op info into op info.cc
@@ -18,8 +18,8 @@ cc_test(scope_test SRCS scope_test.cc DEPS scope)
 proto_library(framework_proto SRCS framework.proto)
 cc_library(attribute SRCS attribute.cc DEPS framework_proto)
+cc_library(op_info SRCS op_info.cc DEPS attribute framework_proto)
-cc_library(operator SRCS operator.cc DEPS framework_proto device_context tensor scope attribute)
+cc_library(operator SRCS operator.cc DEPS op_info device_context tensor scope)
 cc_test(operator_test SRCS operator_test.cc DEPS operator op_registry)
 cc_library(grad_op_builder SRCS grad_op_builder.cc DEPS operator)
......
@@ -72,8 +72,8 @@ class NoGradOpMaker : public OpProtoAndCheckerMaker {
 class FcOp : public operators::NetOp {
  public:
-  FcOp(const std::string &type, const VarNameMap &inputs,
-       const VarNameMap &outputs, const AttributeMap &attrs)
+  FcOp(const std::string &type, const VariableNameMap &inputs,
+       const VariableNameMap &outputs, const AttributeMap &attrs)
       : NetOp(type, inputs, outputs, attrs) {
     AppendOp(OpRegistry::CreateOp("mul",
                                   {{"X", {Input("X")}}, {"Y", {Input("W")}}},
......
@@ -20,13 +20,13 @@ namespace framework {
 enum class OpArgType { IN, OUT };

 static void TransOpArg(const OperatorBase* src_op, const OpArgType& src_type,
-                       bool is_grad, OperatorBase::VarNameMap* vars) {
+                       bool is_grad, VariableNameMap* vars) {
   const auto& src_inout =
       src_type == OpArgType::IN ? src_op->Inputs() : src_op->Outputs();
   auto& dst_inout = *vars;
-  const OpProto* proto = OpRegistry::op_info_map().at(src_op->Type()).proto_;
+  auto& proto = OpInfoMap::Instance().Get(src_op->Type()).Proto();
   const auto& src_arg_list =
-      src_type == OpArgType::IN ? proto->inputs() : proto->outputs();
+      src_type == OpArgType::IN ? proto.inputs() : proto.outputs();
   for (const auto& arg : src_arg_list) {
     if (arg.not_in_gradient() && !is_grad) continue;
     const std::string src_name = arg.name();
@@ -40,26 +40,18 @@ static void TransOpArg(const OperatorBase* src_op, const OpArgType& src_type,
 }

 OperatorBase* BuildGradOp(const OperatorBase* op) {
-  auto it = OpRegistry::op_info_map().find(op->Type());
-  PADDLE_ENFORCE(it != OpRegistry::op_info_map().end(),
-                 "'%s' has not been registered.", op->Type());
-  PADDLE_ENFORCE(it->second.proto_ != nullptr, "'%s' has no OpProto.",
-                 op->Type());
-  std::string grad_op_type = it->second.grad_op_type_;
-  PADDLE_ENFORCE(!grad_op_type.empty(), "'%s' has no gradient operator.",
-                 op->Type());
+  auto& info = OpInfoMap::Instance().Get(op->Type());
+  PADDLE_ENFORCE(info.HasGradientOp());

-  OperatorBase::VarNameMap inputs;
-  OperatorBase::VarNameMap outputs;
+  VariableNameMap inputs;
+  VariableNameMap outputs;
   TransOpArg(op, OpArgType::IN, false, &inputs);   // I
   TransOpArg(op, OpArgType::OUT, false, &inputs);  // O
   TransOpArg(op, OpArgType::OUT, true, &inputs);   // OG
   TransOpArg(op, OpArgType::IN, true, &outputs);   // IG

-  it = OpRegistry::op_info_map().find(grad_op_type);
-  PADDLE_ENFORCE(it != OpRegistry::op_info_map().end(),
-                 "'%s' has not been registered.", grad_op_type);
-  return it->second.creator_(grad_op_type, inputs, outputs, op->Attrs());
+  auto& grad_info = OpInfoMap::Instance().Get(info.grad_op_type_);
+  return grad_info.Creator()(info.grad_op_type_, inputs, outputs, op->Attrs());
 }
 }  // namespace framework
......
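The grad_op_builder.cc hunk above collapses three separate op_info_map lookups and their PADDLE_ENFORCE chains into single OpInfoMap::Instance().Get() calls. A hedged sketch of the resulting call path (not code from this commit; the helper MakeGrad is hypothetical):

OperatorBase* MakeGrad(const OperatorBase* op) {
  auto& info = OpInfoMap::Instance().Get(op->Type());  // PADDLE_ENFORCEs if the type is unregistered
  if (!info.HasGradientOp()) return nullptr;           // forward op declared no grad_op_type_
  return BuildGradOp(op);  // grad op inputs: I, O, OG; grad op outputs: IG
}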
New file: paddle/framework/op_info.cc

/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/framework/op_info.h"

namespace paddle {
namespace framework {

static OpInfoMap* g_op_info_map = nullptr;

OpInfoMap& OpInfoMap::Instance() {
  if (g_op_info_map == nullptr) {
    g_op_info_map = new OpInfoMap();
  }
  return *g_op_info_map;
}
}  // namespace framework
}  // namespace paddle
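op_info.cc keeps Instance() out of the header and deliberately leaks the heap-allocated map, so entries registered from static initializers stay valid for the whole process lifetime with no destruction-order hazard at exit. A function-local static is the common alternative; a sketch of that variant (not what the commit uses):

OpInfoMap& OpInfoMap::Instance() {
  static OpInfoMap instance;  // constructed on first use; thread-safe since C++11
  return instance;
}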
New file: paddle/framework/op_info.h

/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once
#include <functional>
#include <map>
#include <string>
#include <unordered_map>

#include "paddle/framework/attribute.h"

namespace paddle {
namespace framework {
class OperatorBase;
using VariableNameMap = std::map<std::string, std::vector<std::string>>;

using OpCreator = std::function<OperatorBase*(
    const std::string& /*type*/, const VariableNameMap& /*inputs*/,
    const VariableNameMap& /*outputs*/, const AttributeMap& /*attrs*/)>;

struct OpInfo {
  OpCreator creator_;
  std::string grad_op_type_;
  OpProto* proto_;
  OpAttrChecker* checker_;

  bool HasOpProtoAndChecker() const {
    return proto_ != nullptr && checker_ != nullptr;
  }

  const OpProto& Proto() const {
    PADDLE_ENFORCE_NOT_NULL(proto_, "Operator Proto has not been registered");
    PADDLE_ENFORCE(proto_->IsInitialized(),
                   "Operator Proto must be initialized in op info");
    return *proto_;
  }

  const OpAttrChecker& Checker() const {
    PADDLE_ENFORCE_NOT_NULL(checker_,
                            "Operator Checker has not been registered");
    return *checker_;
  }

  const OpCreator& Creator() const {
    PADDLE_ENFORCE_NOT_NULL(creator_,
                            "Operator Creator has not been registered");
    return creator_;
  }

  bool HasGradientOp() const { return !grad_op_type_.empty(); }
};

class OpInfoMap {
 public:
  static OpInfoMap& Instance();

  OpInfoMap(const OpInfoMap& o) = delete;
  OpInfoMap(OpInfoMap&& o) = delete;
  OpInfoMap& operator=(const OpInfoMap& o) = delete;
  OpInfoMap& operator=(OpInfoMap&& o) = delete;

  bool Has(const std::string& op_type) const {
    return map_.find(op_type) != map_.end();
  }

  void Insert(const std::string& type, const OpInfo& info) {
    PADDLE_ENFORCE(!Has(type), "Operator %s has been registered", type);
    map_.insert({type, info});
  }

  const OpInfo& Get(const std::string& type) const {
    auto it = map_.find(type);
    PADDLE_ENFORCE(it != map_.end(), "Operator %s are not found", type);
    return it->second;
  }

  template <typename Callback>
  void IterAllInfo(Callback callback) {
    for (auto& it : map_) {
      callback(it.first, it.second);
    }
  }

 private:
  OpInfoMap() = default;
  std::unordered_map<std::string, const OpInfo> map_;
};
}  // namespace framework
}  // namespace paddle
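op_info.h now owns VariableNameMap, OpCreator, the OpInfo record, and the process-wide OpInfoMap that the rest of the framework queries. A hedged usage sketch of the accessors (the op type "mul" is illustrative):

auto& infos = paddle::framework::OpInfoMap::Instance();
if (infos.Has("mul")) {
  const auto& info = infos.Get("mul");   // PADDLE_ENFORCEs if the type were missing
  const auto& proto = info.Proto();      // PADDLE_ENFORCEs on a null proto_
  bool has_grad = info.HasGradientOp();  // true when grad_op_type_ is non-empty
}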
@@ -19,32 +19,18 @@ limitations under the License. */
 namespace paddle {
 namespace framework {

-std::unique_ptr<OperatorBase> OpRegistry::CreateOp(const std::string& type,
-                                                   const VarNameMap& inputs,
-                                                   const VarNameMap& outputs,
-                                                   AttributeMap attrs) {
-  auto it = op_info_map().find(type);
-  PADDLE_ENFORCE(it != op_info_map().end(),
-                 "Operator '%s' has not been registered.", type);
-  it->second.checker_->Check(attrs);
-  auto op = it->second.creator_(type, inputs, outputs, attrs);
+std::unique_ptr<OperatorBase> OpRegistry::CreateOp(
+    const std::string& type, const VariableNameMap& inputs,
+    const VariableNameMap& outputs, AttributeMap attrs) {
+  auto& info = OpInfoMap::Instance().Get(type);
+  info.Checker().Check(attrs);
+  auto op = info.Creator()(type, inputs, outputs, attrs);
   return std::unique_ptr<OperatorBase>(op);
 }

-std::unique_ptr<OperatorBase> OpRegistry::CreateOp(const OpDesc& op_desc) {
-  VarNameMap inputs = ConvertOpDescVarsToVarNameMap(op_desc.inputs());
-  VarNameMap outputs = ConvertOpDescVarsToVarNameMap(op_desc.outputs());
-  AttributeMap attrs;
-  for (auto& attr : op_desc.attrs()) {
-    attrs[attr.name()] = GetAttrValue(attr);
-  }
-  return CreateOp(op_desc.type(), inputs, outputs, attrs);
-}
-
-OperatorBase::VarNameMap OpRegistry::ConvertOpDescVarsToVarNameMap(
+static VariableNameMap ConvertOpDescVarsToVarNameMap(
     const google::protobuf::RepeatedPtrField<OpDesc::Var>& op_desc_vars) {
-  VarNameMap ret_val;
+  VariableNameMap ret_val;
   for (auto& var : op_desc_vars) {
     auto& var_names = ret_val[var.parameter()];
     auto& var_names_in_proto = var.arguments();
@@ -55,6 +41,17 @@ OperatorBase::VarNameMap OpRegistry::ConvertOpDescVarsToVarNameMap(
   return ret_val;
 }

+std::unique_ptr<OperatorBase> OpRegistry::CreateOp(const OpDesc& op_desc) {
+  VariableNameMap inputs = ConvertOpDescVarsToVarNameMap(op_desc.inputs());
+  VariableNameMap outputs = ConvertOpDescVarsToVarNameMap(op_desc.outputs());
+  AttributeMap attrs;
+  for (auto& attr : op_desc.attrs()) {
+    attrs[attr.name()] = GetAttrValue(attr);
+  }
+  return CreateOp(op_desc.type(), inputs, outputs, attrs);
+}
+
 std::unique_ptr<OperatorBase> OpRegistry::CreateGradOp(const OperatorBase& op) {
   PADDLE_ENFORCE(!op.IsNetOp(), "Use framework::Backward to get backward ops");
   return std::unique_ptr<OperatorBase>(BuildGradOp(&op));
......
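After this hunk, OpRegistry::CreateOp is a lookup, an attribute check, and a creator call. A hedged usage sketch (variable names are illustrative; "mul" stands for any registered type):

auto op = paddle::framework::OpRegistry::CreateOp(
    "mul",
    {{"X", {"x0"}}, {"Y", {"w0"}}},  // inputs: parameter name -> variable names
    {{"Out", {"out0"}}},             // outputs
    {});                             // attrs, validated via info.Checker().Check(attrs)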
@@ -23,6 +23,7 @@ limitations under the License. */
 #include "paddle/framework/attribute.h"
 #include "paddle/framework/framework.pb.h"
 #include "paddle/framework/grad_op_builder.h"
+#include "paddle/framework/op_info.h"
 #include "paddle/framework/operator.h"
 #include "paddle/framework/scope.h"
@@ -30,28 +31,16 @@ namespace paddle {
 namespace framework {

 class OpRegistry {
-  using VarNameMap = OperatorBase::VarNameMap;
-
-  using OpCreator = std::function<OperatorBase*(
-      const std::string& /*type*/, const VarNameMap& /*inputs*/,
-      const VarNameMap& /*outputs*/, const AttributeMap& /*attrs*/)>;
-
  public:
-  struct OpInfo {
-    OpCreator creator_;
-    std::string grad_op_type_;
-    OpProto* proto_;
-    OpAttrChecker* checker_;
-  };
-
   template <typename OpType, typename ProtoMakerType, typename GradOpType>
   static void RegisterOp(const std::string& op_type,
                          const std::string& grad_op_type) {
-    PADDLE_ENFORCE(op_info_map().count(op_type) == 0,
+    PADDLE_ENFORCE(!OpInfoMap::Instance().Has(op_type),
                    "'%s' is registered more than once.", op_type);
     OpInfo op_info;
-    op_info.creator_ = [](const std::string& type, const VarNameMap& inputs,
-                          const VarNameMap& outputs,
-                          const AttributeMap& attrs) {
+    op_info.creator_ = [](
+        const std::string& type, const VariableNameMap& inputs,
+        const VariableNameMap& outputs, const AttributeMap& attrs) {
       return new OpType(type, inputs, outputs, attrs);
     };
     op_info.grad_op_type_ = grad_op_type;
@@ -70,7 +59,7 @@ class OpRegistry {
       op_info.proto_ = nullptr;
       op_info.checker_ = nullptr;
     }
-    op_info_map().insert(std::make_pair(op_type, op_info));
+    OpInfoMap::Instance().Insert(op_type, op_info);
     // register gradient op
     if (!grad_op_type.empty()) {
       RegisterOp<GradOpType, NOPMaker, NOP>(grad_op_type, "");
@@ -78,21 +67,13 @@ class OpRegistry {
   }

   static std::unique_ptr<OperatorBase> CreateOp(const std::string& type,
-                                                const VarNameMap& inputs,
-                                                const VarNameMap& outputs,
+                                                const VariableNameMap& inputs,
+                                                const VariableNameMap& outputs,
                                                 AttributeMap attrs);

   static std::unique_ptr<OperatorBase> CreateOp(const OpDesc& op_desc);

-  static VarNameMap ConvertOpDescVarsToVarNameMap(
-      const google::protobuf::RepeatedPtrField<OpDesc::Var>& op_desc_vars);
-
   static std::unique_ptr<OperatorBase> CreateGradOp(const OperatorBase& op);
-
-  static std::unordered_map<std::string, const OpInfo>& op_info_map() {
-    static std::unordered_map<std::string, const OpInfo> op_info_map_;
-    return op_info_map_;
-  }
 };

 class Registrar {
......
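Registration keeps the same public shape; only its backing store moved into OpInfoMap::Instance(). A hedged sketch of a call through the template (MyOp, MyOpMaker, and MyOpGrad are hypothetical types):

// MyOp/MyOpGrad derive from OperatorBase; MyOpMaker is an OpProtoAndCheckerMaker.
OpRegistry::RegisterOp<MyOp, MyOpMaker, MyOpGrad>("my_op", "my_op_grad");
// The non-empty grad_op_type makes RegisterOp recurse as
// RegisterOp<GradOpType, NOPMaker, NOP>("my_op_grad", "").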
@@ -115,8 +115,8 @@ void OperatorBase::Rename(const std::string& old_name,
 }

 OperatorBase::OperatorBase(const std::string& type,
-                           const OperatorBase::VarNameMap& inputs,
-                           const OperatorBase::VarNameMap& outputs,
+                           const VariableNameMap& inputs,
+                           const VariableNameMap& outputs,
                            const AttributeMap& attrs)
     : type_(type), inputs_(inputs), outputs_(outputs), attrs_(attrs) {
   static std::atomic<size_t> gUniqId(0UL);
@@ -141,18 +141,10 @@ std::vector<std::string> OperatorBase::OutputVars(bool has_intermediate) const {
   }
   return ret_val;
 }
-  auto it = OpRegistry::op_info_map().find(type_);
-  PADDLE_ENFORCE(
-      it != OpRegistry::op_info_map().end(),
-      "Operator %s not registered, cannot figure out intermediate outputs",
-      type_);
-  PADDLE_ENFORCE(
-      it->second.proto_ != nullptr,
-      "Operator %s has no OpProto, cannot figure out intermediate outputs",
-      type_);
+  auto& info = OpInfoMap::Instance().Get(Type());

   // get all OpProto::Var for outputs
-  for (auto& o : it->second.proto_->outputs()) {
+  for (auto& o : info.Proto().outputs()) {
     // ignore all intermediate output
     if (o.intermediate()) continue;
     auto out = outputs_.find(o.name());
......
@@ -19,6 +19,7 @@ limitations under the License. */
 #include <unordered_map>
 #include <vector>

+#include "op_info.h"
 #include "paddle/framework/attribute.h"
 #include "paddle/framework/framework.pb.h"
 #include "paddle/framework/scope.h"
@@ -62,10 +63,8 @@ class ExecutionContext;
  */
 class OperatorBase {
  public:
-  using VarNameMap = std::map<std::string, std::vector<std::string>>;
-
-  OperatorBase(const std::string& type, const VarNameMap& inputs,
-               const VarNameMap& outputs, const AttributeMap& attrs);
+  OperatorBase(const std::string& type, const VariableNameMap& inputs,
+               const VariableNameMap& outputs, const AttributeMap& attrs);

   virtual ~OperatorBase() {}
@@ -93,8 +92,8 @@ class OperatorBase {
   /// rename inputs outputs name
   void Rename(const std::string& old_name, const std::string& new_name);

-  const VarNameMap& Inputs() const { return inputs_; }
-  const VarNameMap& Outputs() const { return outputs_; }
+  const VariableNameMap& Inputs() const { return inputs_; }
+  const VariableNameMap& Outputs() const { return outputs_; }
   //! Get a input with argument's name described in `op_proto`
   const std::string& Input(const std::string& name) const;
   //! Get a input which has multiple variables.
@@ -122,30 +121,32 @@ class OperatorBase {
   // I (Inputs)
   // O (Outputs)
   // OG (Output Gradients)
-  VarNameMap inputs_;
+  VariableNameMap inputs_;
   // NOTE: in case of OpGrad, outputs_ contains
   // IG (Inputs Gradients)
-  VarNameMap outputs_;
+  VariableNameMap outputs_;
   AttributeMap attrs_;
 };

 // Macro for define a clone method.
 // If you are writing an kernel operator, `Clone` will be defined when you
 // register it. i.e. `Clone` method is not needed to define by yourself.
-#define DEFINE_OP_CLONE_METHOD(CLS)                       \
-  std::unique_ptr<OperatorBase> Clone() const final {     \
-    return std::unique_ptr<OperatorBase>(new CLS(*this)); \
+#define DEFINE_OP_CLONE_METHOD(cls)                       \
+  std::unique_ptr<OperatorBase> Clone() const final {     \
+    return std::unique_ptr<OperatorBase>(new cls(*this)); \
   }

 // Macro for define a default constructor for Operator.
 // You can also use
 //   using PARENT_CLASS::PARENT_CLASS;
 // to use parent's constructor.
-#define DEFINE_OP_CONSTRUCTOR(CLS, PARENT_CLS)                                  \
-  CLS(const std::string& type, const VarNameMap& inputs,                       \
-      const VarNameMap& outputs, const paddle::framework::AttributeMap& attrs) \
-      : PARENT_CLS(type, inputs, outputs, attrs) {}
+#define DEFINE_OP_CONSTRUCTOR(cls, parent_cls)             \
+  cls(const std::string& type,                             \
+      const ::paddle::framework::VariableNameMap& inputs,  \
+      const ::paddle::framework::VariableNameMap& outputs, \
+      const paddle::framework::AttributeMap& attrs)        \
+      : parent_cls(type, inputs, outputs, attrs) {}

 class NOP : public OperatorBase {
  public:
@@ -389,8 +390,8 @@ class OperatorWithKernel : public OperatorBase {
   using OpKernelMap =
       std::unordered_map<OpKernelKey, std::unique_ptr<OpKernel>, OpKernelHash>;

-  OperatorWithKernel(const std::string& type, const VarNameMap& inputs,
-                     const VarNameMap& outputs, const AttributeMap& attrs)
+  OperatorWithKernel(const std::string& type, const VariableNameMap& inputs,
+                     const VariableNameMap& outputs, const AttributeMap& attrs)
       : OperatorBase(type, inputs, outputs, attrs) {}

   void InferShape(const Scope& scope) const override {
......
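The macro renames (CLS to cls, PARENT_CLS to parent_cls) are cosmetic; the pair still erases constructor and clone boilerplate for concrete ops. A hedged sketch of a minimal operator using both (MyNop is hypothetical, and the Run signature, abbreviated in the test hunk below, is an assumption):

class MyNop : public paddle::framework::OperatorBase {
 public:
  DEFINE_OP_CONSTRUCTOR(MyNop, OperatorBase);
  DEFINE_OP_CLONE_METHOD(MyNop);
  void InferShape(const paddle::framework::Scope& scope) const override {}
  void Run(const paddle::framework::Scope& scope,  // signature assumed
           const paddle::platform::DeviceContext& dev_ctx) const override {}
};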
@@ -23,8 +23,8 @@ static int op_run_num = 0;
 class OpWithoutKernelTest : public OperatorBase {
  public:
-  OpWithoutKernelTest(const std::string& type, const VarNameMap& inputs,
-                      const VarNameMap& outputs, const AttributeMap& attrs)
+  OpWithoutKernelTest(const std::string& type, const VariableNameMap& inputs,
+                      const VariableNameMap& outputs, const AttributeMap& attrs)
       : OperatorBase(type, inputs, outputs, attrs), x(1) {}
   void InferShape(const Scope& scope) const override {}
   void Run(const Scope& scope,
@@ -249,8 +249,9 @@ TEST(OpKernel, multi_inputs) {
 class OperatorClone : public paddle::framework::OperatorBase {
  public:
   DEFINE_OP_CLONE_METHOD(OperatorClone);
-  OperatorClone(const std::string& type, const VarNameMap& inputs,
-                const VarNameMap& outputs,
+  OperatorClone(const std::string& type,
+                const paddle::framework::VariableNameMap& inputs,
+                const paddle::framework::VariableNameMap& outputs,
                 const paddle::framework::AttributeMap& attrs)
       : OperatorBase(type, inputs, outputs, attrs) {}
   void InferShape(const paddle::framework::Scope& scope) const override {}
......
@@ -139,19 +139,16 @@ All parameter, weight, gradient are variables in Paddle.
   //! @note: Be careful! PyBind will return std::string as an unicode, not
   //! Python str. If you want a str object, you should cast them in Python.
   m.def("get_all_op_protos", []() -> std::vector<py::bytes> {
-    auto &op_info_map = OpRegistry::op_info_map();
     std::vector<py::bytes> ret_values;
-    for (auto it = op_info_map.begin(); it != op_info_map.end(); ++it) {
-      const OpProto *proto = it->second.proto_;
-      if (proto == nullptr) {
-        continue;
-      }
-      PADDLE_ENFORCE(proto->IsInitialized(), "OpProto must all be initialized");
+    OpInfoMap::Instance().IterAllInfo([&ret_values](const std::string &type,
+                                                    const OpInfo &info) {
+      if (!info.HasOpProtoAndChecker()) return;
       std::string str;
-      PADDLE_ENFORCE(proto->SerializeToString(&str),
+      PADDLE_ENFORCE(info.Proto().SerializeToString(&str),
                      "Serialize OpProto Error. This could be a bug of Paddle.");
-      ret_values.push_back(py::bytes(str));
-    }
+      ret_values.emplace_back(str);
+    });
     return ret_values;
   });
   m.def_submodule(
......
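IterAllInfo lets pybind walk every registered operator without exposing the underlying map type. A hedged sketch of another traversal (all names local to the sketch):

size_t n_protos = 0;
paddle::framework::OpInfoMap::Instance().IterAllInfo(
    [&n_protos](const std::string& type, const paddle::framework::OpInfo& info) {
      if (info.HasOpProtoAndChecker()) ++n_protos;  // same filter the binding applies
    });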
@@ -81,9 +81,8 @@ std::vector<std::string> NetOp::OutputVars(bool has_intermediate) const {
   return ret_val;
 }

-NetOp::NetOp(const std::string& type,
-             const framework::OperatorBase::VarNameMap& inputs,
-             const framework::OperatorBase::VarNameMap& outputs,
+NetOp::NetOp(const std::string& type, const framework::VariableNameMap& inputs,
+             const framework::VariableNameMap& outputs,
              const framework::AttributeMap& attrs)
     : framework::OperatorBase(type, inputs, outputs, attrs) {}
......
@@ -38,8 +38,10 @@ class NetOp : public framework::OperatorBase {
  public:
   static const char kAll[];
   NetOp() : framework::OperatorBase("plain_net", {}, {}, {}) {}
-  NetOp(const std::string& type, const VarNameMap& inputs,
-        const VarNameMap& outputs, const framework::AttributeMap& attrs);
+
+  NetOp(const std::string& type, const framework::VariableNameMap& inputs,
+        const framework::VariableNameMap& outputs,
+        const framework::AttributeMap& attrs);

   NetOp(const NetOp& o) : framework::OperatorBase(o.type_, {}, {}, o.attrs_) {
     this->ops_.reserve(o.ops_.size());
......
@@ -131,8 +131,8 @@ const rnn::ArgumentName RecurrentGradientOp::kArgName{
     "memories", "pre_memories", "boot_memories@grad"};

 RecurrentOp::RecurrentOp(const std::string& type,
-                         const framework::OperatorBase::VarNameMap& inputs,
-                         const framework::OperatorBase::VarNameMap& outputs,
+                         const framework::VariableNameMap& inputs,
+                         const framework::VariableNameMap& outputs,
                          const framework::AttributeMap& attrs)
     : OperatorBase(type, inputs, outputs, attrs) {
   rnn::InitArgument(kArgName, &arg_, *this);
@@ -223,8 +223,8 @@ void RecurrentGradientAlgorithm::InferShape(const Scope& scope) const {
 }

 RecurrentGradientOp::RecurrentGradientOp(
-    const std::string& type, const framework::OperatorBase::VarNameMap& inputs,
-    const framework::OperatorBase::VarNameMap& outputs,
+    const std::string& type, const framework::VariableNameMap& inputs,
+    const framework::VariableNameMap& outputs,
     const framework::AttributeMap& attrs)
     : OperatorBase(type, inputs, outputs, attrs) {
   rnn::InitArgument(kArgName, &arg_, *this);
......
@@ -114,8 +114,9 @@ class RecurrentGradientAlgorithm {
 class RecurrentOp : public framework::OperatorBase {
  public:
-  RecurrentOp(const std::string& type, const VarNameMap& inputs,
-              const VarNameMap& outputs, const framework::AttributeMap& attrs);
+  RecurrentOp(const std::string& type,
+              const framework::VariableNameMap& inputs,
+              const framework::VariableNameMap& outputs,
+              const framework::AttributeMap& attrs);

   RecurrentOp(const RecurrentOp& o)
       : framework::OperatorBase(
@@ -150,8 +151,9 @@ class RecurrentOp : public framework::OperatorBase {
 class RecurrentGradientOp : public framework::OperatorBase {
  public:
-  RecurrentGradientOp(const std::string& type, const VarNameMap& inputs,
-                      const VarNameMap& outputs,
+  RecurrentGradientOp(const std::string& type,
+                      const framework::VariableNameMap& inputs,
+                      const framework::VariableNameMap& outputs,
                       const framework::AttributeMap& attrs);

   RecurrentGradientOp(const RecurrentGradientOp& o)
......