From f6a940936b5f44ebf99a9925991158fdd3beaffd Mon Sep 17 00:00:00 2001
From: tensor-tang
Date: Tue, 8 Aug 2017 21:22:15 +0800
Subject: [PATCH] remove unused comments, refine and rename

---
 paddle/gserver/layers/MkldnnFcLayer.cpp | 4 ++--
 paddle/gserver/layers/MkldnnFcLayer.h   | 4 ++--
 paddle/gserver/layers/MkldnnLayer.cpp   | 9 ++++-----
 paddle/gserver/layers/MkldnnLayer.h     | 4 ++--
 paddle/gserver/tests/MkldnnTester.cpp   | 2 +-
 python/paddle/trainer/config_parser.py  | 4 ++--
 6 files changed, 13 insertions(+), 14 deletions(-)

diff --git a/paddle/gserver/layers/MkldnnFcLayer.cpp b/paddle/gserver/layers/MkldnnFcLayer.cpp
index 7e09ed33d2..e4c4d4675d 100644
--- a/paddle/gserver/layers/MkldnnFcLayer.cpp
+++ b/paddle/gserver/layers/MkldnnFcLayer.cpp
@@ -50,7 +50,7 @@ bool MkldnnFcLayer::init(const LayerMap& layerMap,
   return true;
 }
 
-void MkldnnFcLayer::cvtWgtFromPaddle() {
+void MkldnnFcLayer::convertWeightsFromPaddle() {
   if (FLAGS_use_mkldnn_wgt) {
     return;
   }
@@ -75,7 +75,7 @@ void MkldnnFcLayer::cvtWgtFromPaddle() {
   hasInitedWgt_ = true;
 }
 
-void MkldnnFcLayer::cvtWgtToPaddle() {
+void MkldnnFcLayer::convertWeightsToPaddle() {
   MatrixPtr dnnWgt = weight_->getW();
   MatrixPtr paddleWgt;
   dnnWgt->transpose(paddleWgt, true);
diff --git a/paddle/gserver/layers/MkldnnFcLayer.h b/paddle/gserver/layers/MkldnnFcLayer.h
index 0064fc4727..f891052284 100644
--- a/paddle/gserver/layers/MkldnnFcLayer.h
+++ b/paddle/gserver/layers/MkldnnFcLayer.h
@@ -44,9 +44,9 @@ public:
   bool init(const LayerMap& layerMap,
             const ParameterMap& parameterMap) override;
 
-  void cvtWgtFromPaddle() override;
+  void convertWeightsFromPaddle() override;
 
-  void cvtWgtToPaddle() override;
+  void convertWeightsToPaddle() override;
 
   void forward(PassType passType) override;
 
diff --git a/paddle/gserver/layers/MkldnnLayer.cpp b/paddle/gserver/layers/MkldnnLayer.cpp
index c909fe274d..6bd2b15a17 100644
--- a/paddle/gserver/layers/MkldnnLayer.cpp
+++ b/paddle/gserver/layers/MkldnnLayer.cpp
@@ -14,7 +14,6 @@ limitations under the License.
 */
 
 #include "MkldnnLayer.h"
 
-// using namespace mkldnn;  // NOLINT
 using mem = mkldnn::memory;  // NOLINT
 typedef mem::format format;
 typedef mkldnn::inner_product_forward fc_fwd;
@@ -94,7 +93,7 @@ void MkldnnLayer::mkldnnForwardFC(int bs,
   // if input size changed, reset it
   resetForwardFC(bs, ic, ih, iw, botData, oc, topData, wgtData, biasData);
 
-  this->cvtWgtFromPaddle();
+  this->convertWeightsFromPaddle();
 
   // update input, since the data might be changed if this is after data layer
   inVal_->set_data_handle(botData);
@@ -208,9 +207,9 @@ void MkldnnLayer::mkldnnBackwardFC(int bs,
 }
 
 void MkldnnLayer::printSizeInfo() {
-  VLOG(DNN_SIZES) << "bs: " << bs_ << ", ic: " << ic_ << ", ih: " << ih_
-                  << ", iw: " << iw_ << ", oc: " << oc_ << ", oh: " << oh_
-                  << ", ow: " << ow_;
+  VLOG(DNN_SIZES) << getName() << ": bs: " << bs_ << ", ic: " << ic_
+                  << ", ih: " << ih_ << ", iw: " << iw_ << ", oc: " << oc_
+                  << ", oh: " << oh_ << ", ow: " << ow_;
 }
 
 mem::desc MkldnnLayer::createMD(mem::dims dims,
diff --git a/paddle/gserver/layers/MkldnnLayer.h b/paddle/gserver/layers/MkldnnLayer.h
index c653eb9985..e5c93500c7 100644
--- a/paddle/gserver/layers/MkldnnLayer.h
+++ b/paddle/gserver/layers/MkldnnLayer.h
@@ -87,13 +87,13 @@ public:
    * convert weight from paddle format to mkldnn format
    * weight_ will be override
    */
-  virtual void cvtWgtFromPaddle() { ; }
+  virtual void convertWeightsFromPaddle() {}
 
   /**
    * convert mkldnn weight to paddle format
    * weight_ will be override
    */
-  virtual void cvtWgtToPaddle() { ; }
+  virtual void convertWeightsToPaddle() {}
 
   void resetForwardFC(int bs,
                       int ic,
diff --git a/paddle/gserver/tests/MkldnnTester.cpp b/paddle/gserver/tests/MkldnnTester.cpp
index ef99b384a9..59b3861df8 100644
--- a/paddle/gserver/tests/MkldnnTester.cpp
+++ b/paddle/gserver/tests/MkldnnTester.cpp
@@ -149,7 +149,7 @@ void MkldnnTester::checkBackwardWgts() {
   const MkldnnLayerPtr dnnlayer =
       std::dynamic_pointer_cast<MkldnnLayer>(dnnLayer_);
   CHECK(dnnlayer);
-  dnnlayer->cvtWgtToPaddle();
+  dnnlayer->convertWeightsToPaddle();
   for (size_t i = 0; i < parameters_[DNN].size(); ++i) {
     const VectorPtr& dnn = parameters_[DNN][i]->getBuf(PARAMETER_VALUE);
     const VectorPtr& ref = parameters_[REF][i]->getBuf(PARAMETER_VALUE);
diff --git a/python/paddle/trainer/config_parser.py b/python/paddle/trainer/config_parser.py
index dc07af343d..3213df5186 100644
--- a/python/paddle/trainer/config_parser.py
+++ b/python/paddle/trainer/config_parser.py
@@ -1614,13 +1614,13 @@ class FCLayer(LayerBase):
                  error_clipping_threshold=None,
                  **xargs):
         use_mkldnn = bool(int(g_command_config_args.get("use_mkldnn", 0)))
+        use_mkldnn_wgt = bool(
+            int(g_command_config_args.get("use_mkldnn_wgt", 0)))
         if use_mkldnn:
             self.layer_type = 'mkldnn_fc'
             config_assert(
                 len(inputs) == 1,
                 "MkldnnFCLayer support one and only one input!")
-            use_mkldnn_wgt = bool(
-                int(g_command_config_args.get("use_mkldnn_wgt", 0)))
         super(FCLayer, self).__init__(
             name, self.layer_type, size, inputs=inputs, **xargs)
         for input_index in xrange(len(self.inputs)):
--
GitLab
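
A minimal, self-contained C++ sketch of the override pattern the rename settles on, for illustration only. DnnLayerBase and DnnFcLayer are hypothetical stand-ins for MkldnnLayer and MkldnnFcLayer (they are not the real Paddle classes), and the actual weight transpose/reorder logic is elided; only the renamed convertWeightsFromPaddle()/convertWeightsToPaddle() hooks and their call sites are mirrored.

// sketch.cpp -- illustrative stand-ins only; build with: g++ -std=c++11 sketch.cpp
#include <iostream>

// Stand-in for MkldnnLayer: the base class declares no-op conversion hooks,
// as the patched MkldnnLayer.h does after the rename.
class DnnLayerBase {
public:
  virtual ~DnnLayerBase() = default;
  // Convert weights from the Paddle layout to the MKL-DNN layout.
  virtual void convertWeightsFromPaddle() {}
  // Convert weights from the MKL-DNN layout back to the Paddle layout.
  virtual void convertWeightsToPaddle() {}
};

// Stand-in for MkldnnFcLayer: a concrete layer overrides both hooks.
class DnnFcLayer : public DnnLayerBase {
public:
  void convertWeightsFromPaddle() override {
    if (converted_) {
      return;  // mirrors the early-return guard in MkldnnFcLayer
    }
    // ... transpose/reorder the weight matrix here (elided) ...
    converted_ = true;
    std::cout << "weights converted to MKL-DNN layout\n";
  }

  void convertWeightsToPaddle() override {
    // ... transpose/reorder back so tests can compare with the reference layer ...
    std::cout << "weights converted back to Paddle layout\n";
  }

private:
  bool converted_ = false;
};

int main() {
  DnnFcLayer fc;
  // The forward pass calls this before using the weights, as
  // mkldnnForwardFC does via this->convertWeightsFromPaddle().
  fc.convertWeightsFromPaddle();
  // The tester (MkldnnTester::checkBackwardWgts) calls this before
  // comparing against the reference implementation's weights.
  fc.convertWeightsToPaddle();
  return 0;
}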