diff --git a/CMakeLists.txt b/CMakeLists.txt
index e0db0d535b3fc661c6398f74e17d2cb048217677..861bb50a2de0249e4e5ac2e2fa1d7a8a7c61bca0 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -64,6 +64,7 @@ include(external/python) # download, build, install python
 include(external/openblas) # download, build, install openblas
 include(external/swig) # download, build, install swig
 include(external/warpctc) # download, build, install warpctc
+include(external/any) # download linb::any

 include(package) # set paddle packages
 include(cpplint) # set paddle c++ style
diff --git a/cmake/external/any.cmake b/cmake/external/any.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..8116f235d535917c03deb646ff4ec083a0cdadc7
--- /dev/null
+++ b/cmake/external/any.cmake
@@ -0,0 +1,20 @@
+INCLUDE(ExternalProject)
+
+SET(ANY_SOURCE_DIR ${THIRD_PARTY_PATH}/any)
+
+INCLUDE_DIRECTORIES(${ANY_SOURCE_DIR}/src/linb_any)
+
+ExternalProject_Add(
+    linb_any
+    ${EXTERNAL_PROJECT_LOG_ARGS}
+    GIT_REPOSITORY  "https://github.com/thelink2012/any.git"
+    GIT_TAG         "8fef1e93710a0edf8d7658999e284a1142c4c020"
+    PREFIX          ${ANY_SOURCE_DIR}
+    UPDATE_COMMAND  ""
+    CONFIGURE_COMMAND ""
+    BUILD_COMMAND   ""
+    INSTALL_COMMAND ""
+    TEST_COMMAND    ""
+)
+
+add_definitions(-DANY_IMPL_ANY_CAST_MOVEABLE)
diff --git a/paddle/function/Function.cpp b/paddle/function/Function.cpp
index f47d55a4ade97d76e0f1940a2234e34e20efade6..f71c0f681b3bc524ba96c55f1dcad30ef59478c8 100644
--- a/paddle/function/Function.cpp
+++ b/paddle/function/Function.cpp
@@ -16,66 +16,6 @@ limitations under the License. */

 namespace paddle {

-template <>
-size_t FuncConfig::get<size_t>(const std::string& key) const {
-  auto it = valueMap_.find(key);
-  CHECK(it != valueMap_.end()) << "Cannot find value: '" << key << "'";
-  return it->second.s;
-}
-
-template <>
-real FuncConfig::get<real>(const std::string& key) const {
-  auto it = valueMap_.find(key);
-  CHECK(it != valueMap_.end()) << "Cannot find value: '" << key << "'";
-  return it->second.r;
-}
-
-template <>
-int FuncConfig::get<int>(const std::string& key) const {
-  auto it = valueMap_.find(key);
-  CHECK(it != valueMap_.end()) << "Cannot find value: '" << key << "'";
-  return it->second.i;
-}
-
-template <>
-bool FuncConfig::get<bool>(const std::string& key) const {
-  auto it = valueMap_.find(key);
-  CHECK(it != valueMap_.end()) << "Cannot find value: '" << key << "'";
-  return it->second.b;
-}
-
-template <>
-FuncConfig& FuncConfig::set(const std::string& key, size_t v) {
-  CHECK_EQ(static_cast<int>(valueMap_.count(key)), 0) << "Duplicated value: "
-                                                      << key;
-  valueMap_[key].s = v;
-  return *this;
-}
-
-template <>
-FuncConfig& FuncConfig::set(const std::string& key, real v) {
-  CHECK_EQ(static_cast<int>(valueMap_.count(key)), 0) << "Duplicated value: "
-                                                      << key;
-  valueMap_[key].r = v;
-  return *this;
-}
-
-template <>
-FuncConfig& FuncConfig::set(const std::string& key, int v) {
-  CHECK_EQ(static_cast<int>(valueMap_.count(key)), 0) << "Duplicated value: "
-                                                      << key;
-  valueMap_[key].i = v;
-  return *this;
-}
-
-template <>
-FuncConfig& FuncConfig::set(const std::string& key, bool v) {
-  CHECK_EQ(static_cast<int>(valueMap_.count(key)), 0) << "Duplicated value: "
-                                                      << key;
-  valueMap_[key].b = v;
-  return *this;
-}
-
 void BufferArgs::addArg(const Matrix& arg,
                         const TensorShape& shape,
                         ArgType argType) {
diff --git a/paddle/function/Function.h b/paddle/function/Function.h
index 3bbeb6e525f85bdde9a54c8d60146eaa30a1bb4d..9ad00c6f370cf64e9cc26f16e62c4d2ddb284003 100644
--- a/paddle/function/Function.h
+++ b/paddle/function/Function.h
@@ -18,32 +18,49 @@ limitations under the License. */
 #include <vector>
 #include "BufferArg.h"
 #include "paddle/math/Matrix.h"
+#include "paddle/utils/Any.h"
 #include "paddle/utils/ClassRegistrar.h"
+#include "paddle/utils/Error.h"

 namespace paddle {

 /**
  * Function Configuration.
  * The argument type of Function::init.
- * Follow-up will consider moving this data structure to Proto inside.
  */
 class FuncConfig {
 public:
-  union value {
-    size_t s;
-    real r;
-    int i;
-    bool b;
-  };
-
   template <typename T>
-  T get(const std::string& key) const;
+  T get(const std::string& key, Error* err = nullptr) const {
+    try {
+      return any_cast<T>(valueMap_.at(key));
+    } catch (std::exception& e) {  // could be cast or out of range exception.
+      if (err) {
+        *err = Error(e.what());
+      } else {
+        LOG(FATAL) << "Cannot get key " << key << " with error " << e.what();
+      }
+      return T();
+    }
+  }

   template <typename T>
-  FuncConfig& set(const std::string& key, T v);
+  FuncConfig& set(const std::string& key, T v, Error* err = nullptr) {
+    auto it = valueMap_.find(key);
+    if (it != valueMap_.end()) {  // already contains key.
+      if (err) {
+        *err = Error("Key %s is already set in FuncConfig", key.c_str());
+      } else {
+        LOG(FATAL) << "Key " << key << " is already set in FuncConfig.";
+      }
+      return *this;
+    }
+    valueMap_[key] = any(v);
+    return *this;
+  }

 protected:
-  std::map<std::string, value> valueMap_;
+  mutable std::unordered_map<std::string, any> valueMap_;
 };

 /**
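A minimal usage sketch of the any-backed FuncConfig above (illustrative only, not part of the patch; it assumes "paddle/function/Function.h" is included and runs inside namespace paddle):

// Sketch: values are stored type-erased in a paddle::any, so the exact
// stored type must be requested back via the template argument of get().
void funcConfigSketch() {
  FuncConfig config;
  std::vector<uint32_t> channel = {1, 2};
  config.set("channel", channel);  // stored as paddle::any

  Error err;
  // A wrong template argument or a missing key populates err
  // (or hits LOG(FATAL) when err is nullptr).
  auto c = config.get<std::vector<uint32_t>>("channel", &err);
  (void)c;

  config.set("channel", channel, &err);  // duplicate key -> err gets set
}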
diff --git a/paddle/function/PadOp.cpp b/paddle/function/PadOp.cpp
index f1a0d2a1a96f24ddff8cd120681a8bc8cddaf40a..adba7c92ece505eecc74edce6b393cf27fa10ccc 100644
--- a/paddle/function/PadOp.cpp
+++ b/paddle/function/PadOp.cpp
@@ -25,9 +25,9 @@ void Pad(real* outputs,
          const int inH,
          const int inW,
          const PadConf& pad) {
-  int cstart = pad.channelStart, cend = pad.channelEnd;
-  int hstart = pad.heightStart, hend = pad.heightEnd;
-  int wstart = pad.widthStart, wend = pad.widthEnd;
+  int cstart = pad.channel[0], cend = pad.channel[1];
+  int hstart = pad.height[0], hend = pad.height[1];
+  int wstart = pad.width[0], wend = pad.width[1];
   int outC = inC + cstart + cend;
   int outH = inH + hstart + hend;
   int outW = inW + wstart + wend;
@@ -51,9 +51,9 @@ void PadGrad(real* inGrad,
              const int inH,
              const int inW,
              const PadConf& pad) {
-  int cstart = pad.channelStart, cend = pad.channelEnd;
-  int hstart = pad.heightStart, hend = pad.heightEnd;
-  int wstart = pad.widthStart, wend = pad.widthEnd;
+  int cstart = pad.channel[0], cend = pad.channel[1];
+  int hstart = pad.height[0], hend = pad.height[1];
+  int wstart = pad.width[0], wend = pad.width[1];
   int outC = inC + cstart + cend;
   int outH = inH + hstart + hend;
   int outW = inW + wstart + wend;
@@ -71,6 +71,12 @@ void PadGrad(real* inGrad,
   }
 }

+static inline PadConf castToPadConf(const FuncConfig& conf) {
+  return {conf.get<std::vector<uint32_t>>("channel"),
+          conf.get<std::vector<uint32_t>>("height"),
+          conf.get<std::vector<uint32_t>>("width")};
+}
+
 /**
  * \brief Padding zeros to input according to the specify dimension.
  *        The struct pad_ contains the padding size in each dimension.
@@ -127,14 +133,7 @@ void PadGrad(real* inGrad,
 template <DeviceType Device>
 class PadFunc : public FunctionBase {
 public:
-  void init(const FuncConfig& config) override {
-    pad_.channelStart = config.get<int>("cstart");
-    pad_.channelEnd = config.get<int>("cend");
-    pad_.heightStart = config.get<int>("hstart");
-    pad_.heightEnd = config.get<int>("hend");
-    pad_.widthStart = config.get<int>("wstart");
-    pad_.widthEnd = config.get<int>("wend");
-  }
+  void init(const FuncConfig& config) override { pad_ = castToPadConf(config); }

   void calc(const BufferArgs& inputs, const BufferArgs& outputs) override {
     CHECK_EQ(1UL, inputs.size());
@@ -175,14 +174,7 @@ private:
 template <DeviceType Device>
 class PadGradFunc : public FunctionBase {
 public:
-  void init(const FuncConfig& config) override {
-    pad_.channelStart = config.get<int>("cstart");
-    pad_.channelEnd = config.get<int>("cend");
-    pad_.heightStart = config.get<int>("hstart");
-    pad_.heightEnd = config.get<int>("hend");
-    pad_.widthStart = config.get<int>("wstart");
-    pad_.widthEnd = config.get<int>("wend");
-  }
+  void init(const FuncConfig& config) override { pad_ = castToPadConf(config); }

   void calc(const BufferArgs& inputs, const BufferArgs& outputs) override {
     CHECK_EQ(1UL, inputs.size());
diff --git a/paddle/function/PadOp.h b/paddle/function/PadOp.h
index 7b5c730a6a0fa57833e63beba085cb17054ae2f5..0e226ec7370b9897ebdc697ee528b90a37e4ec56 100644
--- a/paddle/function/PadOp.h
+++ b/paddle/function/PadOp.h
@@ -19,18 +19,12 @@ limitations under the License. */
 namespace paddle {

 struct PadConf {
-  /// how many values to add before the data along channel dimension.
-  int channelStart;
-  /// how many values to add after the data along channel dimension.
-  int channelEnd;
-  /// how many values to add before the data along height dimension.
-  int heightStart;
-  /// how many values to add after the data along height dimension.
-  int heightEnd;
-  /// how many values to add before the data along width dimension.
-  int widthStart;
-  /// how many values to add after the data along width dimension.
-  int widthEnd;
+  /// how many values to add before/after the data along channel dimension.
+  std::vector<uint32_t> channel;
+  /// how many values to add before/after the data along height dimension.
+  std::vector<uint32_t> height;
+  /// how many values to add before/after the data along width dimension.
+  std::vector<uint32_t> width;
 };

 /**
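For reference, PadConf is now a plain aggregate of {before, after} pairs per dimension, so castToPadConf() above can brace-initialize it straight from the FuncConfig values. An illustrative literal (not part of the patch):

// Pad one value before/after along channel, two along height, none along width.
PadConf pad = {/*channel=*/{1, 1}, /*height=*/{2, 2}, /*width=*/{0, 0}};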
diff --git a/paddle/gserver/layers/PadLayer.cpp b/paddle/gserver/layers/PadLayer.cpp
index bb618c09f9777785d93995fa7140dd4a5383cd1b..a5ed7e057aea8f065ee752f8c0f0d2d9bdddfc8b 100644
--- a/paddle/gserver/layers/PadLayer.cpp
+++ b/paddle/gserver/layers/PadLayer.cpp
@@ -36,12 +36,9 @@ bool PadLayer::init(const LayerMap& layerMap,
   CHECK_EQ(2, pad_conf.pad_c_size());
   CHECK_EQ(2, pad_conf.pad_h_size());
   CHECK_EQ(2, pad_conf.pad_w_size());
-  padc_.push_back(pad_conf.pad_c(0));
-  padc_.push_back(pad_conf.pad_c(1));
-  padh_.push_back(pad_conf.pad_h(0));
-  padh_.push_back(pad_conf.pad_h(1));
-  padw_.push_back(pad_conf.pad_w(0));
-  padw_.push_back(pad_conf.pad_w(1));
+  padc_ = {pad_conf.pad_c(0), pad_conf.pad_c(1)};
+  padh_ = {pad_conf.pad_h(0), pad_conf.pad_h(1)};
+  padw_ = {pad_conf.pad_w(0), pad_conf.pad_w(1)};

   outDims_ = TensorShape(4);
   setOutDims(0);
@@ -49,21 +46,15 @@ bool PadLayer::init(const LayerMap& layerMap,
   createFunction(forward_,
                  "Pad",
                  FuncConfig()
-                     .set("cstart", padc_[0])
-                     .set("cend", padc_[1])
-                     .set("hstart", padh_[0])
-                     .set("hend", padh_[1])
-                     .set("wstart", padw_[0])
-                     .set("wend", padw_[1]));
+                     .set("channel", padc_)
+                     .set("height", padh_)
+                     .set("width", padw_));

   createFunction(backward_,
                  "PadGrad",
                  FuncConfig()
-                     .set("cstart", padc_[0])
-                     .set("cend", padc_[1])
-                     .set("hstart", padh_[0])
-                     .set("hend", padh_[1])
-                     .set("wstart", padw_[0])
-                     .set("wend", padw_[1]));
+                     .set("channel", padc_)
+                     .set("height", padh_)
+                     .set("width", padw_));
   return true;
 }
diff --git a/paddle/gserver/layers/PadLayer.h b/paddle/gserver/layers/PadLayer.h
index b2bbf28082e630aeb429ee997a1d43ce7ba05d1c..fe9388d8cc260ed599af0113361f4687f3f4a18b 100644
--- a/paddle/gserver/layers/PadLayer.h
+++ b/paddle/gserver/layers/PadLayer.h
@@ -38,9 +38,9 @@ protected:
   void setOutDims(const size_t batchSize);
   void setTensorDim(const size_t batchSize);

-  std::vector<int> padc_;
-  std::vector<int> padh_;
-  std::vector<int> padw_;
+  std::vector<uint32_t> padc_;
+  std::vector<uint32_t> padh_;
+  std::vector<uint32_t> padw_;
   TensorShape inDims_;
   TensorShape outDims_;
 };
diff --git a/paddle/utils/Any.h b/paddle/utils/Any.h
new file mode 100644
index 0000000000000000000000000000000000000000..99a0139accc4988f1e4cce45eeb688a6603c2c31
--- /dev/null
+++ b/paddle/utils/Any.h
@@ -0,0 +1,35 @@
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. */
+
+#pragma once
+#if __cplusplus > 201402L
+#include <any>
+
+namespace paddle {
+// using std::any for C++ 17
+using std::any;
+using std::any_cast;
+using std::bad_any_cast;
+}  // namespace paddle
+
+#else
+#include <any.hpp>
+
+namespace paddle {
+// use linb::any for C++ 11
+using linb::any;
+using linb::any_cast;
+using linb::bad_any_cast;
+}  // namespace paddle
+#endif
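Finally, an illustrative sketch of the paddle::any alias from paddle/utils/Any.h (not part of the patch): it resolves to std::any under C++17 and to linb::any otherwise, and both implementations throw bad_any_cast on a type mismatch, which is what FuncConfig::get catches and converts into an Error.

#include <cstdint>
#include <vector>
#include "paddle/utils/Any.h"

void anySketch() {
  paddle::any v = std::vector<uint32_t>{1, 2};
  // any_cast requires an exact match with the stored type.
  auto ok = paddle::any_cast<std::vector<uint32_t>>(v);
  (void)ok;
  try {
    paddle::any_cast<int>(v);  // stored type is std::vector<uint32_t>, throws
  } catch (paddle::bad_any_cast& e) {
    // FuncConfig::get surfaces this as an Error (or LOG(FATAL)).
  }
}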