From 64bfd8147fc574466f7b5972de926ed0cec00f66 Mon Sep 17 00:00:00 2001 From: qiaolongfei Date: Sat, 15 Apr 2017 17:13:34 +0800 Subject: [PATCH] fix style problem --- paddle/api/PaddleAPI.h | 8 +++++--- paddle/api/ParameterUpdater.cpp | 10 +++++++--- paddle/function/BufferArgTest.cpp | 2 +- paddle/function/FunctionTest.cpp | 2 +- paddle/function/TensorShapeTest.cpp | 2 +- paddle/function/TensorTypeTest.cpp | 2 +- python/paddle/v2/optimizer.py | 4 ++-- python/paddle/v2/trainer.py | 11 ++++++++--- 8 files changed, 26 insertions(+), 15 deletions(-) diff --git a/paddle/api/PaddleAPI.h b/paddle/api/PaddleAPI.h index c8800519bd..725328ce4d 100644 --- a/paddle/api/PaddleAPI.h +++ b/paddle/api/PaddleAPI.h @@ -19,9 +19,9 @@ limitations under the License. */ #include #include #include +#include "paddle/gserver/gradientmachines/GradientMachine.h" #include "paddle/utils/Common.h" #include "paddle/utils/GlobalConstants.h" -#include "paddle/gserver/gradientmachines/GradientMachine.h" /// Import PaddlePaddle's enumeration into global namespace. 
using namespace paddle::enumeration_wrapper; // NOLINT @@ -470,7 +470,8 @@ private: enum GradientMatchineCreateMode { CREATE_MODE_NORMAL = paddle::GradientMachine::kNormal, - CREATE_MODE_SGD_SPARSE_CPU_TRAINING = paddle::GradientMachine::kSgdSparseCpuTraining, + CREATE_MODE_SGD_SPARSE_CPU_TRAINING = + paddle::GradientMachine::kSgdSparseCpuTraining, CREATE_MODE_TESTING = paddle::GradientMachine::kTesting }; @@ -819,7 +820,8 @@ private: public: static ParameterUpdater* createLocalUpdater(OptimizationConfig* config); static ParameterUpdater* createRemoteUpdater(OptimizationConfig* config, - int passCount, bool userSparseUpdater); + int passCount, + bool userSparseUpdater); ~ParameterUpdater(); /** diff --git a/paddle/api/ParameterUpdater.cpp b/paddle/api/ParameterUpdater.cpp index e96ccc9285..708379ded5 100644 --- a/paddle/api/ParameterUpdater.cpp +++ b/paddle/api/ParameterUpdater.cpp @@ -32,12 +32,16 @@ ParameterUpdater *ParameterUpdater::createRemoteUpdater( OptimizationConfig *config, int passCount, bool userSparseUpdater) { auto updater = new ParameterUpdater(); auto remoteUpdater = new paddle::RemoteParameterUpdater( - config->m->getConfig(), passCount, nullptr); + config->m->getConfig(), passCount, nullptr); if (userSparseUpdater) { std::unique_ptr remoteUpdaterPtr; remoteUpdaterPtr.reset(remoteUpdater); - auto sparseRemoteUpdater = new paddle::SparseRemoteParameterUpdaterComposite( - config->m->getConfig(), passCount, false, std::move(remoteUpdaterPtr)); + auto sparseRemoteUpdater = + new paddle::SparseRemoteParameterUpdaterComposite( + config->m->getConfig(), + passCount, + false, + std::move(remoteUpdaterPtr)); updater->m->updater.reset(sparseRemoteUpdater); } else { updater->m->updater.reset(remoteUpdater); diff --git a/paddle/function/BufferArgTest.cpp b/paddle/function/BufferArgTest.cpp index 1744f37780..f1a234ab1a 100644 --- a/paddle/function/BufferArgTest.cpp +++ b/paddle/function/BufferArgTest.cpp @@ -12,8 +12,8 @@ WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ -#include "BufferArg.h" #include +#include "BufferArg.h" #include "paddle/math/MemoryHandle.h" namespace paddle { diff --git a/paddle/function/FunctionTest.cpp b/paddle/function/FunctionTest.cpp index fdf7e631e5..f9ea7c7e4f 100644 --- a/paddle/function/FunctionTest.cpp +++ b/paddle/function/FunctionTest.cpp @@ -12,8 +12,8 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ -#include "Function.h" #include +#include "Function.h" #include "paddle/math/SparseMatrix.h" namespace paddle { diff --git a/paddle/function/TensorShapeTest.cpp b/paddle/function/TensorShapeTest.cpp index 45a2e106e7..e19afe0c4d 100644 --- a/paddle/function/TensorShapeTest.cpp +++ b/paddle/function/TensorShapeTest.cpp @@ -12,8 +12,8 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ -#include "TensorShape.h" #include +#include "TensorShape.h" namespace paddle { diff --git a/paddle/function/TensorTypeTest.cpp b/paddle/function/TensorTypeTest.cpp index e50e46f3e9..5b5c504ae2 100644 --- a/paddle/function/TensorTypeTest.cpp +++ b/paddle/function/TensorTypeTest.cpp @@ -12,8 +12,8 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ -#include "TensorType.h" #include +#include "TensorType.h" namespace paddle { diff --git a/python/paddle/v2/optimizer.py b/python/paddle/v2/optimizer.py index 6fefd7b2f2..7bac1ea3b9 100644 --- a/python/paddle/v2/optimizer.py +++ b/python/paddle/v2/optimizer.py @@ -42,8 +42,8 @@ class Optimizer(object): return swig_api.ParameterUpdater.createLocalUpdater(self.__opt_conf__) def create_remote_updater(self, pass_num, use_sparse_updater): - return swig_api.ParameterUpdater.createRemoteUpdater(self.__opt_conf__, - pass_num, use_sparse_updater) + return swig_api.ParameterUpdater.createRemoteUpdater( + self.__opt_conf__, pass_num, use_sparse_updater) class Momentum(Optimizer): diff --git a/python/paddle/v2/trainer.py b/python/paddle/v2/trainer.py index dc23eb5b0d..80f243b413 100644 --- a/python/paddle/v2/trainer.py +++ b/python/paddle/v2/trainer.py @@ -42,7 +42,12 @@ class SGD(object): :type extra_layers: paddle.v2.config_base.Layer """ - def __init__(self, cost, parameters, update_equation, extra_layers=None, is_local=True): + def __init__(self, + cost, + parameters, + update_equation, + extra_layers=None, + is_local=True): if not isinstance(parameters, v2_parameters.Parameters): raise TypeError('parameters should be parameters') @@ -97,8 +102,8 @@ class SGD(object): if self.__is_local__: updater = self.__optimizer__.create_local_updater() else: - updater = self.__optimizer__.create_remote_updater(num_passes, - self.__use_sparse_updater__) + updater = self.__optimizer__.create_remote_updater( + num_passes, self.__use_sparse_updater__) updater.init(self.__gradient_machine__) self.__gradient_machine__.start() -- GitLab