Commit 5d132ecf authored by mozga-intel, committed by tensor-tang

Auto-cmake generator, auto-fill map (#15402)

test=develop
Parent 3831a469
......@@ -2,4 +2,5 @@ if(WITH_NGRAPH)
cc_library(ngraph_bridge SRCS ngraph_bridge.cc DEPS operator framework_proto ngraph)
cc_library(ngraph_engine SRCS ngraph_engine.cc DEPS ngraph_bridge framework_proto)
op_library(ngraph_engine_op DEPS ngraph_engine op_registry op_info device_context)
add_subdirectory(ops)
endif()
......@@ -19,50 +19,21 @@ limitations under the License. */
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ngraph_bridge.h"
#include "paddle/fluid/operators/ngraph/ngraph_ops.h"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/enforce.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
namespace operators {
namespace NG_OPS = paddle::operators::ngraphs;
std::map<std::string,
std::function<void(const std::shared_ptr<framework::OperatorBase>&,
std::shared_ptr<std::unordered_map<
std::string, std::shared_ptr<ngraph::Node>>>)>>
NgraphBridge::NG_NODE_MAP = {
{"accuracy", NG_OPS::BuildAccuracyNode},
{"conv2d", NG_OPS::BuildConv2dNode},
{"conv2d_grad", NG_OPS::BuildConv2dGradNode},
{"batch_norm", NG_OPS::BuildBatchNormNode},
{"batch_norm_grad", NG_OPS::BuildBatchNormGradNode},
{"cross_entropy", NG_OPS::BuildCrossEntropyNode},
{"cross_entropy_grad", NG_OPS::BuildCrossEntropyGradNode},
{"elementwise_add", NG_OPS::BuildElementwiseAddNode},
{"elementwise_add_grad", NG_OPS::BuildElementwiseAddGradNode},
{"fill_constant", NG_OPS::BuildFillConstantNode},
{"mean", NG_OPS::BuildMeanNode},
{"mean_grad", NG_OPS::BuildMeanGradNode},
{"momentum", NG_OPS::BuildMomentumNode},
{"mul", NG_OPS::BuildMulNode},
{"mul_grad", NG_OPS::BuildMulGradNode},
{"pool2d", NG_OPS::BuildPool2dNode},
{"pool2d_grad", NG_OPS::BuildPool2dGradNode},
{"softmax", NG_OPS::BuildSoftmaxNode},
{"softmax_grad", NG_OPS::BuildSoftmaxGradNode},
{"scale", NG_OPS::BuildScaleNode},
{"sigmoid", NG_OPS::BuildUnaryNode<ngraph::op::Sigmoid>},
{"sum", NG_OPS::BuildSumNode},
{"relu", NG_OPS::BuildUnaryNode<ngraph::op::Relu>},
{"relu_grad", NG_OPS::BuildReluGradNode},
{"tanh", NG_OPS::BuildUnaryNode<ngraph::op::Tanh>},
{"tanh_grad", NG_OPS::BuildTanhGradNode},
{"top_k", NG_OPS::BuildTopKNode}};
bool NgraphBridge::isRegister(const std::string& str) {
return ops::NgraphSingleton::Lookup(str);
}
void NgraphBridge::BuildNgNode(
const std::shared_ptr<framework::OperatorBase>& op) {
auto& op_type = op->Type();
NG_NODE_MAP[op_type](op, ngb_node_map_);
ops::NgraphSingleton::BuildNode(ngb_node_map_, op, op_type);
}
} // namespace operators
......
......@@ -28,13 +28,6 @@ namespace operators {
class NgraphBridge {
public:
static std::map<
std::string,
std::function<void(const std::shared_ptr<framework::OperatorBase>&,
std::shared_ptr<std::unordered_map<
std::string, std::shared_ptr<ngraph::Node>>>)>>
NG_NODE_MAP;
explicit NgraphBridge(
std::shared_ptr<
std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
......@@ -43,6 +36,8 @@ class NgraphBridge {
void BuildNgNode(const std::shared_ptr<framework::OperatorBase>& op);
static bool isRegister(const std::string& str);
private:
std::shared_ptr<
std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
......
......@@ -88,14 +88,12 @@ static std::vector<std::vector<int>> NgraphOpIntervals(
int pivot = left;
while (pivot < right) {
auto op_type = ops.at(pivot)->Type();
if (NgraphBridge::NG_NODE_MAP.find(op_type) ==
NgraphBridge::NG_NODE_MAP.end()) {
if (NgraphBridge::isRegister(op_type)) {
++pivot;
} else {
int start = pivot, end = start;
while (pivot < right &&
(NgraphBridge::NG_NODE_MAP.find(ops.at(pivot)->Type()) !=
NgraphBridge::NG_NODE_MAP.end())) {
(!NgraphBridge::isRegister(ops.at(pivot)->Type()))) {
++pivot;
++end;
}
......
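The loop above partitions the op list with NgraphBridge::isRegister, which (like NgraphSingleton::Lookup further down in this commit) returns true when an op type has no ngraph builder. Below is a self-contained toy sketch of that partitioning: is_unsupported is a made-up predicate standing in for the registry, and the surrounding details of the real NgraphOpIntervals are omitted.

#include <iostream>
#include <set>
#include <string>
#include <utility>
#include <vector>

// Hypothetical stand-in for NgraphBridge::isRegister: true means the op type
// has no ngraph builder and must stay on the regular executor.
static bool is_unsupported(const std::string& op_type) {
  static const std::set<std::string> supported = {"mul", "relu", "softmax"};
  return supported.count(op_type) == 0;
}

// Toy version of the interval search: collect maximal runs of consecutive
// supported ops, which the engine would hand to ngraph as subgraphs.
static std::vector<std::pair<int, int>> OpIntervals(
    const std::vector<std::string>& ops) {
  std::vector<std::pair<int, int>> intervals;
  int pivot = 0, right = static_cast<int>(ops.size());
  while (pivot < right) {
    if (is_unsupported(ops[pivot])) {
      ++pivot;  // skip ops that have no ngraph builder
    } else {
      int start = pivot;
      while (pivot < right && !is_unsupported(ops[pivot])) ++pivot;
      intervals.emplace_back(start, pivot);  // [start, pivot) is fusible
    }
  }
  return intervals;
}

int main() {
  std::vector<std::string> ops = {"feed", "mul", "relu", "fetch", "softmax"};
  for (auto& iv : OpIntervals(ops))
    std::cout << "[" << iv.first << ", " << iv.second << ")\n";
  // Prints [1, 3) and [4, 5): the mul/relu run and the lone softmax.
}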
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
/*
* This file contains the list of the ngraph operators for Paddle.
*
* ATTENTION: This header requires some C++11 features; for older C++
* standards or C, we might release another API.
*/
#pragma once
#include "ops/accuracy_op.h"
#include "ops/activation_op.h"
#include "ops/batch_norm_op.h"
#include "ops/binary_unary_op.h"
#include "ops/conv2d_op.h"
#include "ops/cross_entropy_op.h"
#include "ops/elementwise_add_op.h"
#include "ops/fill_constant_op.h"
#include "ops/mean_op.h"
#include "ops/momentum_op.h"
#include "ops/mul_op.h"
#include "ops/pool2d_op.h"
#include "ops/scale_op.h"
#include "ops/softmax_op.h"
#include "ops/sum_op.h"
#include "ops/top_k_op.h"
file(GLOB LIST_OPS RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}" "*.h")
set(pass_file ${PADDLE_BINARY_DIR}/paddle/fluid/operators/ngraph/ngraph_ops.h)
file(WRITE ${pass_file} "// Generated by the /paddle/fluid/operators/ngraph/ops/CMakeLists.txt. DO NOT EDIT!\n\n")
file(APPEND ${pass_file} "\#pragma once\n")
foreach(OPS_NAME ${LIST_OPS})
file(APPEND ${pass_file} "\#include \"paddle/fluid/operators/ngraph/ops/${OPS_NAME}\"\n")
endforeach(OPS_NAME)
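For reference, a sketch of the ngraph_ops.h this script would generate, assuming the op headers shown in this commit are what the glob finds; the real file lists every *.h under ops/ (including helpers such as op_bridge.h) in glob order.

// Generated by the /paddle/fluid/operators/ngraph/ops/CMakeLists.txt. DO NOT EDIT!

#pragma once
#include "paddle/fluid/operators/ngraph/ops/accuracy_op.h"
#include "paddle/fluid/operators/ngraph/ops/activation_op.h"
#include "paddle/fluid/operators/ngraph/ops/batch_norm_op.h"
// ... one include per header found by the glob ...
#include "paddle/fluid/operators/ngraph/ops/top_k_op.h"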
......@@ -17,6 +17,7 @@ limitations under the License. */
#include <string>
#include <vector>
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -63,3 +64,5 @@ void BuildAccuracyNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(accuracy, BuildAccuracyNode);
......@@ -17,6 +17,7 @@ limitations under the License. */
#include <string>
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -50,3 +51,6 @@ void BuildTanhGradNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(relu_grad, BuildReluGradNode);
REGISTER_NG_OP(tanh_grad, BuildTanhGradNode);
......@@ -20,6 +20,7 @@ limitations under the License. */
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/elementwise_node.h"
#include "paddle/fluid/operators/ngraph/ops/elementwise_scalar_op.h"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -155,3 +156,6 @@ void BuildBatchNormGradNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(batch_norm, BuildBatchNormNode);
REGISTER_NG_OP(batch_norm_grad, BuildBatchNormGradNode);
......@@ -16,6 +16,7 @@ limitations under the License. */
#include <string>
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -47,3 +48,7 @@ static void BuildUnaryNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(relu, BuildUnaryNode<ngraph::op::Relu>);
REGISTER_NG_OP(tanh, BuildUnaryNode<ngraph::op::Tanh>);
REGISTER_NG_OP(sigmoid, BuildUnaryNode<ngraph::op::Sigmoid>);
......@@ -17,6 +17,7 @@ limitations under the License. */
#include <string>
#include <vector>
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -233,3 +234,6 @@ void BuildConv2dGradNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(conv2d, BuildConv2dNode);
REGISTER_NG_OP(conv2d_grad, BuildConv2dGradNode);
......@@ -18,6 +18,7 @@ limitations under the License. */
#include <string>
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -143,3 +144,6 @@ void BuildCrossEntropyGradNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(cross_entropy, BuildCrossEntropyNode);
REGISTER_NG_OP(cross_entropy_grad, BuildCrossEntropyGradNode);
......@@ -19,6 +19,7 @@ limitations under the License. */
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/elementwise_node.h"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -85,3 +86,6 @@ void BuildElementwiseAddGradNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(elementwise_add, BuildElementwiseAddNode);
REGISTER_NG_OP(elementwise_add_grad, BuildElementwiseAddGradNode);
......@@ -17,6 +17,7 @@ limitations under the License. */
#include <string>
#include <vector>
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -55,3 +56,5 @@ void BuildFillConstantNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(fill_constant, BuildFillConstantNode);
......@@ -19,6 +19,7 @@ limitations under the License. */
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/elementwise_scalar_op.h"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -64,3 +65,6 @@ void BuildMeanGradNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(mean, BuildMeanNode);
REGISTER_NG_OP(mean_grad, BuildMeanGradNode);
......@@ -17,6 +17,7 @@ limitations under the License. */
#include <string>
#include <vector>
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -99,3 +100,5 @@ void BuildMomentumNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(momentum, BuildMomentumNode);
......@@ -16,6 +16,7 @@ limitations under the License. */
#include <string>
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -130,3 +131,6 @@ static void BuildMulGradNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(mul, BuildMulNode);
REGISTER_NG_OP(mul_grad, BuildMulGradNode);
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#pragma once
#include <algorithm>
#include <map>
#include <string>
#include <unordered_map>
#include "ngraph/node.hpp"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/operators/ngraph/ngraph_bridge.h"
#include "paddle/fluid/platform/enforce.h"
namespace paddle {
namespace operators {
namespace ops {
class NgraphSingleton {
NgraphSingleton() = default;
NgraphSingleton(NgraphSingleton const&) = delete;
void operator=(NgraphSingleton const) = delete;
~NgraphSingleton() = default;
static std::map<
std::string,
std::function<void(const std::shared_ptr<framework::OperatorBase>&,
std::shared_ptr<std::unordered_map<
std::string, std::shared_ptr<ngraph::Node>>>)>>
ng_node_maps_;
public:
template <typename TF>
static void Register(TF&& tf, const std::string& name) {
ng_node_maps_[name] = tf;
}
// Note: returns true when `name` has NOT been registered, i.e. when no
// ngraph builder is known for this op type.
static bool Lookup(const std::string& name) {
auto it = ng_node_maps_.find(name);
if (it == ng_node_maps_.end()) {
return true;
}
return false;
}
static void BuildNode(
const std::shared_ptr<std::unordered_map<
std::string, std::shared_ptr<ngraph::Node>>>& ng_maps,
const std::shared_ptr<framework::OperatorBase>& op,
const std::string& name) {
ng_node_maps_[name](op, ng_maps);
}
};
std::map<std::string,
std::function<void(const std::shared_ptr<framework::OperatorBase>&,
std::shared_ptr<std::unordered_map<
std::string, std::shared_ptr<ngraph::Node>>>)>>
NgraphSingleton::ng_node_maps_;
} // namespace ops
} // namespace operators
} // namespace paddle
#define REGISTER_NG_OP(op_type__, Converter__) \
struct ng_##op_type__##_converter { \
ng_##op_type__##_converter() { \
paddle::operators::ops::NgraphSingleton::Register( \
paddle::operators::ngraphs::Converter__, #op_type__); \
} \
}; \
ng_##op_type__##_converter ng_##op_type__##_converter__;
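For illustration, here is a self-contained toy in the same style as op_bridge.h above (a sketch, not Paddle code: Registry, REGISTER_TOY_OP, and BuildReluToy are made-up names). A macro stamps out a global registrar object whose constructor fills a static map, which is exactly the mechanism REGISTER_NG_OP relies on; mirroring NgraphSingleton::Lookup, Lookup() here returns true when a name is NOT registered.

#include <functional>
#include <iostream>
#include <map>
#include <string>

class Registry {
  static std::map<std::string, std::function<void()>> builders_;

 public:
  static void Register(std::function<void()> f, const std::string& name) {
    builders_[name] = std::move(f);
  }
  // Mirrors NgraphSingleton::Lookup: true when the name is NOT registered.
  static bool Lookup(const std::string& name) {
    return builders_.find(name) == builders_.end();
  }
  static void Build(const std::string& name) { builders_[name](); }
};
std::map<std::string, std::function<void()>> Registry::builders_;

// Same shape as REGISTER_NG_OP: declare a registrar struct, then define one
// global instance whose constructor runs during static initialization.
#define REGISTER_TOY_OP(op_type__, Fn__)       \
  struct toy_##op_type__##_converter {         \
    toy_##op_type__##_converter() {            \
      Registry::Register(Fn__, #op_type__);    \
    }                                          \
  };                                           \
  toy_##op_type__##_converter toy_##op_type__##_converter__;

void BuildReluToy() { std::cout << "building relu node\n"; }
REGISTER_TOY_OP(relu, BuildReluToy);  // registered before main() runs

int main() {
  if (!Registry::Lookup("relu")) Registry::Build("relu");        // builds relu
  std::cout << std::boolalpha << Registry::Lookup("foo") << "\n";  // true
}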
......@@ -18,6 +18,7 @@ limitations under the License. */
#include <vector>
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -172,3 +173,6 @@ void BuildPool2dGradNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(pool2d, BuildPool2dNode);
REGISTER_NG_OP(pool2d_grad, BuildPool2dGradNode);
......@@ -17,6 +17,7 @@ limitations under the License. */
#include <string>
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/elementwise_scalar_op.h"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -37,3 +38,5 @@ void BuildScaleNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(scale, BuildScaleNode);
......@@ -18,6 +18,7 @@ limitations under the License. */
#include <vector>
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/elementwise_scalar_op.h"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -72,3 +73,6 @@ void BuildSoftmaxGradNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(softmax, BuildSoftmaxNode);
REGISTER_NG_OP(softmax_grad, BuildSoftmaxGradNode);
......@@ -16,6 +16,7 @@ limitations under the License. */
#include <string>
#include "ngraph/ngraph.hpp"
#include "paddle/fluid/operators/ngraph/ops/op_bridge.h"
#include "paddle/fluid/platform/ngraph_helper.h"
namespace paddle {
......@@ -42,3 +43,5 @@ void BuildTopKNode(
} // namespace ngraphs
} // namespace operators
} // namespace paddle
REGISTER_NG_OP(top_k, BuildTopKNode);