diff --git a/paddle/function/Function.h b/paddle/function/Function.h index 561cd5fdc793650901948a0f52829148fcab95f0..9ad00c6f370cf64e9cc26f16e62c4d2ddb284003 100644 --- a/paddle/function/Function.h +++ b/paddle/function/Function.h @@ -20,6 +20,7 @@ limitations under the License. */ #include "paddle/math/Matrix.h" #include "paddle/utils/Any.h" #include "paddle/utils/ClassRegistrar.h" +#include "paddle/utils/Error.h" namespace paddle { @@ -30,12 +31,30 @@ namespace paddle { class FuncConfig { public: template <typename T> - T get(const std::string& key) const { - return any_cast<T>(valueMap_[key]); + T get(const std::string& key, Error* err = nullptr) const { + try { + return any_cast<T>(valueMap_.at(key)); + } catch (std::exception& e) { // could be cast or out of range exception. + if (err) { + *err = Error(e.what()); + } else { + LOG(FATAL) << "Cannot get key " << key << " with error " << e.what(); + } + return T(); + } + } template <typename T> - FuncConfig& set(const std::string& key, T v) { + FuncConfig& set(const std::string& key, T v, Error* err = nullptr) { + auto it = valueMap_.find(key); + if (it != valueMap_.end()) { // already contains key. 
+ if (err) { + *err = Error("Key %s is already set in FuncConfig", key.c_str()); + } else { + LOG(FATAL) << "Key " << key << " is already set in FuncConfig."; + } + return *this; + } valueMap_[key] = any(v); return *this; } diff --git a/paddle/function/PadOp.cpp b/paddle/function/PadOp.cpp index f1a0d2a1a96f24ddff8cd120681a8bc8cddaf40a..adba7c92ece505eecc74edce6b393cf27fa10ccc 100644 --- a/paddle/function/PadOp.cpp +++ b/paddle/function/PadOp.cpp @@ -25,9 +25,9 @@ void Pad(real* outputs, const int inH, const int inW, const PadConf& pad) { - int cstart = pad.channelStart, cend = pad.channelEnd; - int hstart = pad.heightStart, hend = pad.heightEnd; - int wstart = pad.widthStart, wend = pad.widthEnd; + int cstart = pad.channel[0], cend = pad.channel[1]; + int hstart = pad.height[0], hend = pad.height[1]; + int wstart = pad.width[0], wend = pad.width[1]; int outC = inC + cstart + cend; int outH = inH + hstart + hend; int outW = inW + wstart + wend; @@ -51,9 +51,9 @@ void PadGrad(real* inGrad, const int inH, const int inW, const PadConf& pad) { - int cstart = pad.channelStart, cend = pad.channelEnd; - int hstart = pad.heightStart, hend = pad.heightEnd; - int wstart = pad.widthStart, wend = pad.widthEnd; + int cstart = pad.channel[0], cend = pad.channel[1]; + int hstart = pad.height[0], hend = pad.height[1]; + int wstart = pad.width[0], wend = pad.width[1]; int outC = inC + cstart + cend; int outH = inH + hstart + hend; int outW = inW + wstart + wend; @@ -71,6 +71,12 @@ void PadGrad(real* inGrad, } } +static inline PadConf castToPadConf(const FuncConfig& conf) { + return {conf.get<std::vector<uint32_t>>("channel"), + conf.get<std::vector<uint32_t>>("height"), + conf.get<std::vector<uint32_t>>("width")}; +} + /** * \brief Padding zeros to input according to the specify dimension. * The struct pad_ contains the padding size in each dimension. 
@@ -127,14 +133,7 @@ void PadGrad(real* inGrad, template <DeviceType Device> class PadFunc : public FunctionBase { public: - void init(const FuncConfig& config) override { - pad_.channelStart = config.get<int>("cstart"); - pad_.channelEnd = config.get<int>("cend"); - pad_.heightStart = config.get<int>("hstart"); - pad_.heightEnd = config.get<int>("hend"); - pad_.widthStart = config.get<int>("wstart"); - pad_.widthEnd = config.get<int>("wend"); - } + void init(const FuncConfig& config) override { pad_ = castToPadConf(config); } void calc(const BufferArgs& inputs, const BufferArgs& outputs) override { CHECK_EQ(1UL, inputs.size()); @@ -175,14 +174,7 @@ private: template <DeviceType Device> class PadGradFunc : public FunctionBase { public: - void init(const FuncConfig& config) override { - pad_.channelStart = config.get<int>("cstart"); - pad_.channelEnd = config.get<int>("cend"); - pad_.heightStart = config.get<int>("hstart"); - pad_.heightEnd = config.get<int>("hend"); - pad_.widthStart = config.get<int>("wstart"); - pad_.widthEnd = config.get<int>("wend"); - } + void init(const FuncConfig& config) override { pad_ = castToPadConf(config); } void calc(const BufferArgs& inputs, const BufferArgs& outputs) override { CHECK_EQ(1UL, inputs.size()); diff --git a/paddle/function/PadOp.h b/paddle/function/PadOp.h index 7b5c730a6a0fa57833e63beba085cb17054ae2f5..0e226ec7370b9897ebdc697ee528b90a37e4ec56 100644 --- a/paddle/function/PadOp.h +++ b/paddle/function/PadOp.h @@ -19,18 +19,12 @@ limitations under the License. */ namespace paddle { struct PadConf { - /// how many values to add before the data along channel dimension. - int channelStart; - /// how many values to add after the data along channel dimension. - int channelEnd; - /// how many values to add before the data along height dimension. - int heightStart; - /// how many values to add after the data along height dimension. - int heightEnd; - /// how many values to add before the data along width dimension. - int widthStart; - /// how many values to add after the data along width dimension. 
- int widthEnd; + /// how many values to add before/after the data along channel dimension. + std::vector<uint32_t> channel; + /// how many values to add before/after the data along height dimension. + std::vector<uint32_t> height; + /// how many values to add before/after the data along width dimension. + std::vector<uint32_t> width; }; /** diff --git a/paddle/gserver/layers/PadLayer.cpp b/paddle/gserver/layers/PadLayer.cpp index bb618c09f9777785d93995fa7140dd4a5383cd1b..a5ed7e057aea8f065ee752f8c0f0d2d9bdddfc8b 100644 --- a/paddle/gserver/layers/PadLayer.cpp +++ b/paddle/gserver/layers/PadLayer.cpp @@ -36,12 +36,9 @@ bool PadLayer::init(const LayerMap& layerMap, CHECK_EQ(2, pad_conf.pad_c_size()); CHECK_EQ(2, pad_conf.pad_h_size()); CHECK_EQ(2, pad_conf.pad_w_size()); - padc_.push_back(pad_conf.pad_c(0)); - padc_.push_back(pad_conf.pad_c(1)); - padh_.push_back(pad_conf.pad_h(0)); - padh_.push_back(pad_conf.pad_h(1)); - padw_.push_back(pad_conf.pad_w(0)); - padw_.push_back(pad_conf.pad_w(1)); + padc_ = {pad_conf.pad_c(0), pad_conf.pad_c(1)}; + padh_ = {pad_conf.pad_h(0), pad_conf.pad_h(1)}; + padw_ = {pad_conf.pad_w(0), pad_conf.pad_w(1)}; outDims_ = TensorShape(4); setOutDims(0); @@ -49,21 +46,15 @@ bool PadLayer::init(const LayerMap& layerMap, createFunction(forward_, "Pad", FuncConfig() - .set("cstart", padc_[0]) - .set("cend", padc_[1]) - .set("hstart", padh_[0]) - .set("hend", padh_[1]) - .set("wstart", padw_[0]) - .set("wend", padw_[1])); + .set("channel", padc_) + .set("height", padh_) + .set("width", padw_)); createFunction(backward_, "PadGrad", FuncConfig() - .set("cstart", padc_[0]) - .set("cend", padc_[1]) - .set("hstart", padh_[0]) - .set("hend", padh_[1]) - .set("wstart", padw_[0]) - .set("wend", padw_[1])); + .set("channel", padc_) + .set("height", padh_) + .set("width", padw_)); return true; } diff --git a/paddle/gserver/layers/PadLayer.h b/paddle/gserver/layers/PadLayer.h index b2bbf28082e630aeb429ee997a1d43ce7ba05d1c..fe9388d8cc260ed599af0113361f4687f3f4a18b 100644 --- 
a/paddle/gserver/layers/PadLayer.h +++ b/paddle/gserver/layers/PadLayer.h @@ -38,9 +38,9 @@ protected: void setOutDims(const size_t batchSize); void setTensorDim(const size_t batchSize); - std::vector<int> padc_; - std::vector<int> padh_; - std::vector<int> padw_; + std::vector<uint32_t> padc_; + std::vector<uint32_t> padh_; + std::vector<uint32_t> padw_; TensorShape inDims_; TensorShape outDims_; }; diff --git a/paddle/utils/Any.h b/paddle/utils/Any.h index 0352e9805425720edb165d08a93f05fe1dbe3892..99a0139accc4988f1e4cce45eeb688a6603c2c31 100644 --- a/paddle/utils/Any.h +++ b/paddle/utils/Any.h @@ -20,6 +20,7 @@ namespace paddle { // using std::any for C++ 17 using std::any; using std::any_cast; +using std::bad_any_cast; } // namespace paddle #else @@ -29,5 +30,6 @@ namespace paddle { // use linb::any for C++ 11 using linb::any; using linb::any_cast; +using linb::bad_any_cast; } // namespace paddle #endif