/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "NewRemoteParameterUpdater.h"
#include "Trainer.h"
#include "paddle/utils/Stat.h"

DECLARE_int32(trainer_id);
DECLARE_string(save_dir);
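// trainer_id and save_dir are gflags defined elsewhere in the trainer binary
// and only declared here. Trainer 0 doubles as the parameter-initialization
// leader in init() below.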

namespace paddle {
NewRemoteParameterUpdater::NewRemoteParameterUpdater(
    const OptimizationConfig &config, const std::string pserverSpec)
    : trainerConfig_(config),
      parameterClient_(-1),
      newParameters_(nullptr),
      newGradients_(nullptr),
      pserverSpec_(pserverSpec) {}

NewRemoteParameterUpdater::NewRemoteParameterUpdater(
    const OptimizationConfig &config,
    const std::string pserverSpec,
    const bool useEtcd)
    : trainerConfig_(config),
      parameterClient_(-1),
      newParameters_(nullptr),
      newGradients_(nullptr),
      pserverSpec_(pserverSpec),
      useEtcd_(useEtcd) {}
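// A minimal construction sketch (hypothetical call site and pserver address;
// the real trainer wires this up from its parsed configuration):
//
//   OptimizationConfig optConfig = ...;  // trainer's optimization config
//   std::unique_ptr<NewRemoteParameterUpdater> updater(
//       new NewRemoteParameterUpdater(optConfig, "127.0.0.1:3000",
//                                     /*useEtcd=*/false));
//   updater->init(gradientMachine->getParameters());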

void NewRemoteParameterUpdater::init(
    const std::vector<ParameterPtr> &parameters) {
  ParameterUpdater::init(parameters);

  for (auto &para : parameters_) {
    para->getBuf(PARAMETER_VALUE)->zeroMem();
    para->getBuf(PARAMETER_GRADIENT)->zeroMem();
  }

  // create parameter server client.
  if (useEtcd_) {
    parameterClient_ = paddle_new_etcd_pserver_client(
        (char *)pserverSpec_.c_str(), FLAGS_trainer_id == 0);
  } else {
    parameterClient_ = paddle_new_pserver_client((char *)pserverSpec_.c_str(),
                                                 FLAGS_trainer_id == 0);
  }
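  // The second argument tells the client whether this trainer is the one
  // eligible to initialize parameters; trainer 0 is chosen here. pserverSpec_
  // is the discovery string handed in by the trainer: with etcd it is
  // presumed to be the etcd endpoint, otherwise the pserver address list.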

  // init new parameters and gradients.
  newParameters_ = initNewParameter(PARAMETER_VALUE);
  newGradients_ = initNewParameter(PARAMETER_GRADIENT);
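  // newParameters_ and newGradients_ are C-API paddle_parameter handles that
  // are presumed to wrap the trainer's PARAMETER_VALUE and PARAMETER_GRADIENT
  // buffers in place, so paddle_send_grads() and paddle_get_params() below
  // operate directly on the live training memory.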

  // Initialize parameters: one trainer gets the opportunity to initialize the
  // parameters and send them to the parameter server; the other trainers
  // fetch the initialized parameters from the parameter server instead.
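  // Trainer-side handshake, as implemented below:
  //   1. paddle_begin_init_params() returns true only on the trainer chosen
  //      to initialize.
  //   2. That trainer calls paddle_init_param() once per parameter, shipping
  //      a serialized OptimizerConfig along with the values, then publishes
  //      everything with paddle_finish_init_params().
  //   3. Every other trainer takes the else branch and fetches the values
  //      with paddle_get_params(), presumably blocking until initialization
  //      has finished.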
  if (paddle_begin_init_params(parameterClient_)) {
    LOG(INFO) << "paddle_begin_init_params start";
    for (int i = 0; i < parameterSize(); ++i) {
      auto paramConfig = parameters_[i]->getConfig();
      LOG(INFO) << "old param config: " << paramConfig.DebugString();
      // FIXME(typhoonzero): convert old paramConfig to optimizerConfig
      OptimizerConfig optimizeConfigV2;
      auto sgdConfigV2 = optimizeConfigV2.mutable_sgd();
      sgdConfigV2->set_momentum(paramConfig.momentum());
      sgdConfigV2->set_decay(paramConfig.decay_rate());
      optimizeConfigV2.set_lr_policy(paddle::OptimizerConfig::Const);
      auto constlr = optimizeConfigV2.mutable_const_lr();
      constlr->set_learning_rate(paramConfig.learning_rate());
      if (trainerConfig_.algorithm() == "sgd") {
        optimizeConfigV2.set_optimizer(paddle::OptimizerConfig::SGD);
        // FIXME: config all algorithms
      } else {
        optimizeConfigV2.set_optimizer(paddle::OptimizerConfig::SGD);
      }
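      // The serialized OptimizerConfig travels with the init request so the
      // pserver can build this parameter's optimizer. paddle_init_param is
      // presumed to copy the buffer, so `bytes` only needs to outlive the
      // call.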
      std::string bytes = optimizeConfigV2.SerializeAsString();
      const char *array = bytes.data();
      int size = (int)bytes.size();
      paddle_init_param(
          parameterClient_, *newParameters_[i], (void *)array, size);
    }
    paddle_finish_init_params(parameterClient_);
    LOG(INFO) << "paddle_begin_init_params done";
  } else {
    paddle_get_params(parameterClient_, newParameters_, parameterSize());
  }

  LOG(INFO) << "NewRemoteParameterUpdater initialized";
}

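// updateImpl is a deliberate no-op: the actual update runs in the pserver's
// optimizer, and the trainer only pushes gradients and pulls results back in
// finishBatch().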
void NewRemoteParameterUpdater::updateImpl(Parameter *para) {}

void NewRemoteParameterUpdater::finishBatch(real cost) {
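  // Each batch performs one synchronous round-trip: paddle_send_grads()
  // presumably blocks until the pserver has applied the gradients, after
  // which paddle_get_params() returns the freshly updated values.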
  // send gradients to the parameter server.
  paddle_send_grads(parameterClient_, newGradients_, parameterSize());
  // get the updated parameters back from the parameter server.
  paddle_get_params(parameterClient_, newParameters_, parameterSize());

  // clear gradients after the parameter update.
  for (auto &para : parameters_) {
    para->getBuf(PARAMETER_GRADIENT)->zeroMem();
  }
}

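// startPass() and finishPass() need no pserver round-trips here; any
// pass-level optimizer state is presumed to live on the parameter server.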
void NewRemoteParameterUpdater::startPass() {}

bool NewRemoteParameterUpdater::finishPass() { return true; }
}  // namespace paddle