From be523baad2f4f494d08fec3a40737578d47e25e1 Mon Sep 17 00:00:00 2001
From: flame
Date: Fri, 1 Mar 2019 16:57:57 +0800
Subject: [PATCH] Add anakin conv2d/relu/sigmoid/tanh converter (#15997)

* add activation op

* test conv2d relu sigmoid tanh
---
 .../inference/anakin/convert/CMakeLists.txt   |  4 +-
 .../inference/anakin/convert/activation.cc    | 59 +++++++++++++
 .../convert/{registrar.h => activation.h}     | 47 +++++-----
 .../fluid/inference/anakin/convert/conv2d.cc  | 87 +++++++++++++++++++
 .../anakin/convert/{registrar.cc => conv2d.h} | 21 ++---
 paddle/fluid/inference/anakin/convert/fc.cc   |  4 -
 .../inference/anakin/convert/op_converter.h   |  1 -
 .../anakin/convert/test_activation_op.cc      | 56 ++++++++++++
 .../anakin/convert/test_conv2d_op.cc          | 62 +++++++++++++
 9 files changed, 301 insertions(+), 40 deletions(-)
 create mode 100644 paddle/fluid/inference/anakin/convert/activation.cc
 rename paddle/fluid/inference/anakin/convert/{registrar.h => activation.h} (51%)
 create mode 100644 paddle/fluid/inference/anakin/convert/conv2d.cc
 rename paddle/fluid/inference/anakin/convert/{registrar.cc => conv2d.h} (66%)
 create mode 100644 paddle/fluid/inference/anakin/convert/test_activation_op.cc
 create mode 100644 paddle/fluid/inference/anakin/convert/test_conv2d_op.cc

diff --git a/paddle/fluid/inference/anakin/convert/CMakeLists.txt b/paddle/fluid/inference/anakin/convert/CMakeLists.txt
index f5bfee861..fffec575c 100644
--- a/paddle/fluid/inference/anakin/convert/CMakeLists.txt
+++ b/paddle/fluid/inference/anakin/convert/CMakeLists.txt
@@ -1,2 +1,4 @@
-cc_library(anakin_op_converter SRCS fc.cc registrar.cc DEPS anakin_engine framework_proto scope)
+cc_library(anakin_op_converter SRCS fc.cc conv2d.cc activation.cc DEPS anakin_engine framework_proto scope operator op_registry)
 cc_test(test_anakin_fc SRCS test_fc_op.cc DEPS anakin_op_converter mul_op)
+cc_test(test_anakin_conv2d SRCS test_conv2d_op.cc DEPS ${FLUID_CORE_MODULES} ${GLOB_OPERATOR_DEPS} anakin_op_converter conv_op im2col vol2col depthwise_conv SERIAL)
+cc_test(test_anakin_activation SRCS test_activation_op.cc DEPS ${FLUID_CORE_MODULES} ${GLOB_OPERATOR_DEPS} activation_op anakin_op_converter SERIAL)
diff --git a/paddle/fluid/inference/anakin/convert/activation.cc b/paddle/fluid/inference/anakin/convert/activation.cc
new file mode 100644
index 000000000..786869e3d
--- /dev/null
+++ b/paddle/fluid/inference/anakin/convert/activation.cc
@@ -0,0 +1,59 @@
+// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "paddle/fluid/inference/anakin/convert/activation.h"
+#include <map>
+#include <string>
+
+using anakin::graph::GraphGlobalMem;
+using anakin::AK_FLOAT;
+using anakin::saber::NV;
+using anakin::saber::Shape;
+
+namespace paddle {
+namespace inference {
+namespace anakin {
+
+ActivationOpConverter::ActivationOpConverter(const std::string &op_type)
+    : op_type_(op_type) {
+  auto it = anakin_ops_type_.find(op_type_);
+  PADDLE_ENFORCE(it != anakin_ops_type_.end(),
+                 "activation op type is not supported");
+  anakin_op_type_ = it->second;
+}
+
+void ActivationOpConverter::operator()(const framework::proto::OpDesc &op,
+                                       const framework::Scope &scope,
+                                       bool test_mode) {
+  framework::OpDesc op_desc(op, nullptr);
+  PADDLE_ENFORCE_EQ(op_desc.Input("X").size(), 1);
+  PADDLE_ENFORCE_EQ(op_desc.Output("Out").size(), 1);
+
+  auto op_name = op_desc.Type() + ":" + op_desc.Output("Out").front();
+  auto input_name = op_desc.Input("X").front();
+  auto output_name = op_desc.Output("Out").front();
+  engine_->AddOp(op_name, "Activation", {input_name}, {output_name});
+  engine_->AddOpAttr(op_name, "type", anakin_op_type_);
+  if (op_type_ == "relu") {
+    engine_->AddOpAttr(op_name, "alpha", 0);
+  }
+}
+
+}  // namespace anakin
+}  // namespace inference
+}  // namespace paddle
+
+REGISTER_ANAKIN_OP_CONVERTER(relu, ReluOpConverter);
+REGISTER_ANAKIN_OP_CONVERTER(sigmoid, SigmoidOpConverter);
+REGISTER_ANAKIN_OP_CONVERTER(tanh, TanhOpConverter);
diff --git a/paddle/fluid/inference/anakin/convert/registrar.h b/paddle/fluid/inference/anakin/convert/activation.h
similarity index 51%
rename from paddle/fluid/inference/anakin/convert/registrar.h
rename to paddle/fluid/inference/anakin/convert/activation.h
index afce66ca0..f1db154a1 100644
--- a/paddle/fluid/inference/anakin/convert/registrar.h
+++ b/paddle/fluid/inference/anakin/convert/activation.h
@@ -14,45 +14,44 @@

 #pragma once

-#include <functional>
 #include <map>
-#include <memory>
 #include <string>
-#include <unordered_map>
+#include "paddle/fluid/inference/anakin/convert/op_converter.h"

 namespace paddle {
 namespace inference {
 namespace anakin {

-class AnakinOpConverter;
-
-class OpRegister {
+class ActivationOpConverter : public AnakinOpConverter {
  public:
-  OpRegister() = default;
-  std::shared_ptr<AnakinOpConverter> Get(const std::string &name);
-  static OpRegister *instance();
-  void OpRegisterFn(const std::string &name,
-                    std::function<std::shared_ptr<AnakinOpConverter>()> fn) {
-    registry_[name] = fn;
-  }
+  explicit ActivationOpConverter(const std::string &op_type);
+
+  virtual void operator()(const framework::proto::OpDesc &op,
+                          const framework::Scope &scope,
+                          bool test_mode) override;
+  virtual ~ActivationOpConverter() {}

  private:
-  using RegisterFnType = std::function<std::shared_ptr<AnakinOpConverter>()>;
-  std::map<std::string, std::function<std::shared_ptr<AnakinOpConverter>()>>
-      registry_;
+  std::string op_type_;
+  std::string anakin_op_type_;
+  std::map<std::string, std::string> anakin_ops_type_{
+      {"relu", "Relu"}, {"tanh", "TanH"}, {"sigmoid", "Sigmoid"}};
 };

-template <typename T, typename... Args>
-class Registrar {
+class ReluOpConverter : public ActivationOpConverter {
  public:
-  Registrar(const std::string &name, Args... args) {
-    std::shared_ptr<T> converter =
-        std::make_shared<T>(std::move(args)...);
-    OpRegister::instance()->OpRegisterFn(name,
-                                         [converter]() { return converter; });
-  }
+  ReluOpConverter() : ActivationOpConverter("relu") {}
 };

+class TanhOpConverter : public ActivationOpConverter {
+ public:
+  TanhOpConverter() : ActivationOpConverter("tanh") {}
+};
+
+class SigmoidOpConverter : public ActivationOpConverter {
+ public:
+  SigmoidOpConverter() : ActivationOpConverter("sigmoid") {}
+};
 }  // namespace anakin
 }  // namespace inference
 }  // namespace paddle
diff --git a/paddle/fluid/inference/anakin/convert/conv2d.cc b/paddle/fluid/inference/anakin/convert/conv2d.cc
new file mode 100644
index 000000000..842d7d8fb
--- /dev/null
+++ b/paddle/fluid/inference/anakin/convert/conv2d.cc
@@ -0,0 +1,87 @@
+// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "paddle/fluid/inference/anakin/convert/conv2d.h"
+#include <algorithm>
+#include <memory>
+#include <vector>
+
+using anakin::graph::GraphGlobalMem;
+using anakin::AK_FLOAT;
+using anakin::saber::NV;
+using anakin::saber::Shape;
+using anakin::PTuple;
+
+namespace paddle {
+namespace inference {
+namespace anakin {
+
+void Conv2dOpConverter::operator()(const framework::proto::OpDesc &op,
+                                   const framework::Scope &scope,
+                                   bool test_mode) {
+  framework::OpDesc op_desc(op, nullptr);
+  PADDLE_ENFORCE_EQ(op_desc.Input("Input").size(), 1UL);
+  PADDLE_ENFORCE_EQ(op_desc.Input("Filter").size(), 1UL);
+  PADDLE_ENFORCE_EQ(op_desc.Output("Output").size(), 1UL);
+
+  auto input_name = op_desc.Input("Input").front();
+  auto output_name = op_desc.Output("Output").front();
+  auto op_name = op_desc.Type() + ":" + op_desc.Output("Output").front();
+  engine_->AddOp(op_name, "Convolution", {input_name}, {output_name});
+
+  auto *filter_v = scope.FindVar(op_desc.Input("Filter").front());
+  PADDLE_ENFORCE_NOT_NULL(filter_v);
+  auto *filter_t = filter_v->GetMutable<framework::LoDTensor>();
+  std::unique_ptr<framework::LoDTensor> weight_tensor(
+      new framework::LoDTensor());
+  weight_tensor->Resize(filter_t->dims());
+  TensorCopySync((*filter_t), platform::CPUPlace(), weight_tensor.get());
+
+  auto *weight_data = weight_tensor->mutable_data<float>(platform::CPUPlace());
+  PADDLE_ENFORCE_EQ(weight_tensor->dims().size(), 4UL);
+
+  // const int n_output = weight_tensor->dims()[0];
+  const int n_input = weight_tensor->dims()[1];
+  const int filter_h = weight_tensor->dims()[2];
+  const int filter_w = weight_tensor->dims()[3];
+  auto filter_num = n_input * filter_h * filter_w;
+  engine_->AddOpAttr(op_name, "filter_num", filter_num);
+  engine_->AddOpAttr<PTuple<int>>(op_name, "kernel_size", {filter_h, filter_w});
+  auto strides = boost::get<std::vector<int>>(op_desc.GetAttr("strides"));
+  engine_->AddOpAttr<PTuple<int>>(op_name, "strides", strides);
+  auto paddings = boost::get<std::vector<int>>(op_desc.GetAttr("paddings"));
+  engine_->AddOpAttr<PTuple<int>>(op_name, "padding", paddings);
+  auto dilations = boost::get<std::vector<int>>(op_desc.GetAttr("dilations"));
+  engine_->AddOpAttr<PTuple<int>>(op_name, "dilation_rate", dilations);
+  const int groups = boost::get<int>(op_desc.GetAttr("groups"));
+  engine_->AddOpAttr(op_name, "group", groups);
+  engine_->AddOpAttr(op_name, "axis", 1);
+  engine_->AddOpAttr(op_name, "bias_term", false);
+
+  auto weight_shape = framework::vectorize2int(filter_t->dims());
+  Shape anakin_shape(weight_shape);
+  auto *weight1 =
+      GraphGlobalMem<NV>::Global().template new_block<AK_FLOAT>(anakin_shape);
+  float *cpu_data = static_cast<float *>(weight1->h_tensor().mutable_data());
+  std::copy_n(weight_tensor->data<float>(), weight_tensor->numel(), cpu_data);
+  weight1->d_tensor().set_shape(anakin_shape);
+  weight1->d_tensor().copy_from(weight1->h_tensor());
+  engine_->AddOpAttr(op_name, "weight_1", *weight1);
+}
+
+}  // namespace anakin
+}  // namespace inference
+}  // namespace paddle
+
+REGISTER_ANAKIN_OP_CONVERTER(conv2d, Conv2dOpConverter);
diff --git a/paddle/fluid/inference/anakin/convert/registrar.cc b/paddle/fluid/inference/anakin/convert/conv2d.h
similarity index 66%
rename from paddle/fluid/inference/anakin/convert/registrar.cc
rename to paddle/fluid/inference/anakin/convert/conv2d.h
index 701ebdb2d..75a30c10d 100644
--- a/paddle/fluid/inference/anakin/convert/registrar.cc
+++ b/paddle/fluid/inference/anakin/convert/conv2d.h
@@ -12,22 +12,23 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-#include "paddle/fluid/inference/anakin/convert/registrar.h"
+#pragma once
+
+#include "paddle/fluid/inference/anakin/convert/op_converter.h"

 namespace paddle {
 namespace inference {
 namespace anakin {

-std::shared_ptr<AnakinOpConverter> OpRegister::Get(const std::string &name) {
-  auto it = registry_.find(name);
-  if (it == registry_.end()) return nullptr;
-  return it->second();
-}
+class Conv2dOpConverter : public AnakinOpConverter {
+ public:
+  Conv2dOpConverter() = default;

-OpRegister *OpRegister::instance() {
-  static OpRegister factory;
-  return &factory;
-}
+  virtual void operator()(const framework::proto::OpDesc &op,
+                          const framework::Scope &scope,
+                          bool test_mode) override;
+  virtual ~Conv2dOpConverter() {}
+};

 }  // namespace anakin
 }  // namespace inference
diff --git a/paddle/fluid/inference/anakin/convert/fc.cc b/paddle/fluid/inference/anakin/convert/fc.cc
index 5e32e4a33..c88e3af33 100644
--- a/paddle/fluid/inference/anakin/convert/fc.cc
+++ b/paddle/fluid/inference/anakin/convert/fc.cc
@@ -17,12 +17,8 @@

 using anakin::graph::GraphGlobalMem;
 using anakin::AK_FLOAT;
-using anakin::Precision;
 using anakin::saber::NV;
-using anakin::saber::X86;
 using anakin::saber::Shape;
-using anakin::PBlock;
-using anakin::PTuple;

 namespace paddle {
 namespace inference {
diff --git a/paddle/fluid/inference/anakin/convert/op_converter.h b/paddle/fluid/inference/anakin/convert/op_converter.h
index 7eb6ed257..ee4d79487 100644
--- a/paddle/fluid/inference/anakin/convert/op_converter.h
+++ b/paddle/fluid/inference/anakin/convert/op_converter.h
@@ -22,7 +22,6 @@
 #include "paddle/fluid/framework/block_desc.h"
 #include "paddle/fluid/framework/op_registry.h"
 #include "paddle/fluid/framework/scope.h"
-#include "paddle/fluid/inference/anakin/convert/registrar.h"
 #include "paddle/fluid/inference/anakin/engine.h"
 #include "paddle/fluid/inference/utils/singleton.h"
 #include "saber/saber_types.h"
diff --git a/paddle/fluid/inference/anakin/convert/test_activation_op.cc b/paddle/fluid/inference/anakin/convert/test_activation_op.cc
new file mode 100644
index 000000000..356dfea29
--- /dev/null
+++ b/paddle/fluid/inference/anakin/convert/test_activation_op.cc
@@ -0,0 +1,56 @@
+/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. */
+
+#include <gtest/gtest.h>
+#include "paddle/fluid/inference/anakin/convert/activation.h"
+#include "paddle/fluid/inference/anakin/convert/op_converter.h"
+#include "paddle/fluid/inference/anakin/convert/ut_helper.h"
+
+namespace paddle {
+namespace inference {
+namespace anakin {
+
+static void test_activation_op(const std::string &op_type) {
+  auto *converter = Registry<AnakinOpConverter>::Global().Lookup(op_type);
+  PADDLE_ENFORCE(converter != nullptr);
+  std::unordered_set<std::string> parameters;
+  framework::Scope scope;
+  AnakinConvertValidation validator(parameters, scope);
+  validator.DeclInputVar("act-X", {10, 6, 1, 1});
+  validator.DeclOutputVar("act-Out", {10, 6, 1, 1});
+  framework::OpDesc desc;
+  desc.SetType(op_type);
+  desc.SetInput("X", {"act-X"});
+  desc.SetOutput("Out", {"act-Out"});
+
+  LOG(INFO) << "set OP";
+  validator.SetOp(*desc.Proto());
+  LOG(INFO) << "execute";
+
+  validator.Execute(5);
+}
+
+TEST(relu_op, test) { test_activation_op("relu"); }
+TEST(sigm_op, test) { test_activation_op("sigmoid"); }
+TEST(tanh_op, test) { test_activation_op("tanh"); }
+}  // namespace anakin
+}  // namespace inference
+}  // namespace paddle
+
+USE_OP(relu);
+USE_OP(sigmoid);
+USE_OP(tanh);
+USE_ANAKIN_CONVERTER(relu);
+USE_ANAKIN_CONVERTER(sigmoid);
+USE_ANAKIN_CONVERTER(tanh);
diff --git a/paddle/fluid/inference/anakin/convert/test_conv2d_op.cc b/paddle/fluid/inference/anakin/convert/test_conv2d_op.cc
new file mode 100644
index 000000000..3049d3d03
--- /dev/null
+++ b/paddle/fluid/inference/anakin/convert/test_conv2d_op.cc
@@ -0,0 +1,62 @@
+/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. */
+
+#include <gtest/gtest.h>
+#include "paddle/fluid/inference/anakin/convert/conv2d.h"
+#include "paddle/fluid/inference/anakin/convert/op_converter.h"
+#include "paddle/fluid/inference/anakin/convert/ut_helper.h"
+
+namespace paddle {
+namespace inference {
+namespace anakin {
+
+TEST(conv2d_op, test) {
+  auto* conv2d_converter =
+      Registry<AnakinOpConverter>::Global().Lookup("conv2d");
+  ASSERT_TRUE(conv2d_converter != nullptr);
+  std::unordered_set<std::string> parameters({"conv2d-Y"});
+  framework::Scope scope;
+  AnakinConvertValidation validator(parameters, scope);
+  validator.DeclInputVar("conv2d-X", {1, 2, 5, 5});
+  validator.DeclParamVar("conv2d-Y", {3, 2, 3, 3});
+  validator.DeclOutputVar("conv2d-Out", {1, 3, 5, 5});
+
+  // Prepare Op description
+  framework::OpDesc desc;
+  desc.SetType("conv2d");
+  desc.SetInput("Input", {"conv2d-X"});
+  desc.SetInput("Filter", {"conv2d-Y"});
+  desc.SetOutput("Output", {"conv2d-Out"});
+
+  const std::vector<int> strides({1, 1});
+  const std::vector<int> paddings({1, 1});
+  const std::vector<int> dilations({1, 1});
+  const int groups = 1;
+
+  desc.SetAttr("strides", strides);
+  desc.SetAttr("paddings", paddings);
+  desc.SetAttr("dilations", dilations);
+  desc.SetAttr("groups", groups);
+
+  validator.SetOp(*desc.Proto());
+
+  validator.Execute(3);
+}
+
+}  // namespace anakin
+}  // namespace inference
+}  // namespace paddle
+
+USE_OP(conv2d);
+USE_ANAKIN_CONVERTER(conv2d);
-- 
GitLab
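
Illustrative sketch (not part of the patch above): with the ActivationOpConverter pattern this patch introduces, wiring up one more Anakin-supported activation is a three-step change. The op name "elu", the Anakin type string "Elu", and the class EluOpConverter below are assumed placeholders; a real addition would also need the matching entry in anakin_ops_type_ and an activation type that Anakin's Activation layer actually accepts.

// Hypothetical example; "elu"/"Elu" and EluOpConverter are assumed names.
// 1) activation.h: extend the op-type map with {"elu", "Elu"} and add a thin subclass
//    next to ReluOpConverter / TanhOpConverter / SigmoidOpConverter:
class EluOpConverter : public ActivationOpConverter {
 public:
  EluOpConverter() : ActivationOpConverter("elu") {}
};

// 2) activation.cc: register the converter, as done for relu/sigmoid/tanh above.
REGISTER_ANAKIN_OP_CONVERTER(elu, EluOpConverter);

// 3) In the test (or any client) translation unit, pull in the op and the converter:
USE_OP(elu);
USE_ANAKIN_CONVERTER(elu);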