From 8383e48f6a7cb6dee8c10b9db0067c8a9c17f2e7 Mon Sep 17 00:00:00 2001
From: Helin Wang
Date: Wed, 25 Oct 2017 14:11:47 -0700
Subject: [PATCH] Fix new remote updater sending all 0 param / grad to pserver

---
 paddle/trainer/NewRemoteParameterUpdater.cpp | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/paddle/trainer/NewRemoteParameterUpdater.cpp b/paddle/trainer/NewRemoteParameterUpdater.cpp
index 35dcb235e7..7d5216a966 100644
--- a/paddle/trainer/NewRemoteParameterUpdater.cpp
+++ b/paddle/trainer/NewRemoteParameterUpdater.cpp
@@ -43,11 +43,6 @@ void NewRemoteParameterUpdater::init(
     const std::vector<ParameterPtr> &parameters) {
   ParameterUpdater::init(parameters);
 
-  for (auto &para : parameters_) {
-    para->getBuf(PARAMETER_VALUE)->zeroMem();
-    para->getBuf(PARAMETER_GRADIENT)->zeroMem();
-  }
-
   // create parameter server client.
   if (useEtcd_) {
     parameterClient_ =
@@ -109,6 +104,8 @@ void NewRemoteParameterUpdater::init(
       LOG(ERROR) << "got unsupported v1 learning_rate_schedule config: "
                  << trainerConfig_.learning_rate_schedule() << ", set to const";
       optimizerConfigV2.set_lr_policy(paddle::OptimizerConfig::Const);
+      optimizerConfigV2.mutable_const_lr()->set_learning_rate(
+          trainerConfig_.learning_rate());
     }
 
     // overwrite optimizerConfigV2 for per-parameter(layer) configs
--
GitLab
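
The two hunks attack the same symptom from both directions. The deleted loop
zeroed every parameter's PARAMETER_VALUE and PARAMETER_GRADIENT buffer during
init(), so the first push to the pserver shipped all-zero data instead of the
real initial values. The added lines fill in the constant learning rate when
falling back from an unsupported v1 schedule: protobuf numeric fields default
to 0, so setting the Const policy alone would presumably leave the pserver
optimizing with a learning rate of 0. Below is a minimal sketch of the
completed fallback, reusing the protobuf setters visible in the second hunk;
the helper name fallbackToConstLr and the double type of the v1 learning rate
are assumptions for illustration, not part of the patch:

    // Sketch only: fallbackToConstLr is a hypothetical helper that mirrors
    // the else-branch the second hunk completes.
    void fallbackToConstLr(paddle::OptimizerConfig &config,
                           double v1LearningRate) {
      // Switch the pserver-side optimizer to a constant learning-rate policy.
      config.set_lr_policy(paddle::OptimizerConfig::Const);
      // Copy the v1 learning rate over; without this call, const_lr keeps
      // its protobuf default (0) and updates would be applied with lr == 0.
      config.mutable_const_lr()->set_learning_rate(v1LearningRate);
    }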