/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <cstdlib>
#include <cstring>
#include <functional>
#include <string>
#include <thread>
#include <vector>

#include "OptimizerConfig.pb.h"
#include "ParameterUpdater.h"
#include "libpaddle_pserver_cclient.h"
#include "paddle/pserver/ParameterClient2.h"
#include "paddle/utils/Queue.h"
#include "paddle/utils/Util.h"

namespace paddle {

/**
 * New remote parameter updater for dense parameters that use cclient of go.
 */
class NewRemoteParameterUpdater : public ParameterUpdater {
public:
  NewRemoteParameterUpdater(const OptimizationConfig& config,
                            const std::string pserverSpec);
35 36 37
  NewRemoteParameterUpdater(const OptimizationConfig& config,
                            const std::string pserverSpec,
                            const bool useEtcd);
38
  ~NewRemoteParameterUpdater() {
39 40
    releaseNewParameter(newParameters_);
    releaseNewParameter(newGradients_);
Q
qiaolongfei 已提交
41
    if (parameterClient_ >= 0) paddle_pserver_client_release(parameterClient_);
42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63
  }

  /**
   * initialize the internal parameter client and itself.
   */
  virtual void init(const std::vector<ParameterPtr>& parameters);
  /**
   * @brief start batch
   *
   * @note  one batch training exhibits stateful feature to help
   *        to do performance tuning, sgd optimization if necessary.
   */
  virtual PassType startBatch(int64_t batchSize) { return PASS_TRAIN; }

  /**
   * send parameters to pservers and get returned parameters
   * from all pservers if necessary.
   */
  virtual void finishBatch(real cost);
  virtual void startPass();
  virtual bool finishPass();

Q
qiaolongfei 已提交
64
protected:
65
  /**
Q
qiaolongfei 已提交
66
   * work need to do after finishBatch
67
   */
Q
qiaolongfei 已提交
68 69 70
  virtual void updateImpl(Parameter* para);

private:
71
  int parameterSize() { return (int)parameters_.size(); }
72

73 74 75 76 77 78 79 80 81 82 83 84
  /**
   * init parameter of go paddle pserver cclient.
   * @param new_params
   * @param type
   */
  paddle_parameter** initNewParameter(ParameterType type) {
    paddle_parameter** new_params =
        (paddle_parameter**)malloc(sizeof(paddle_parameter*) * parameterSize());
    for (int i = 0; i < parameterSize(); ++i) {
      new_params[i] = (paddle_parameter*)malloc(sizeof(paddle_parameter));
      memset(new_params[i], 0, sizeof(paddle_parameter));
    }
Q
qiaolongfei 已提交
85

86 87 88 89 90 91
    for (int i = 0; i < parameterSize(); ++i) {
      ParameterPtr param = parameters_[i];
      new_params[i]->element_type = PADDLE_ELEMENT_TYPE_FLOAT32;
      new_params[i]->name = (char*)param->getName().c_str();
      new_params[i]->content =
          (unsigned char*)(param->getBuf(type).get()->getData());
Q
qiaolongfei 已提交
92 93
      new_params[i]->content_len =
          (int)param->getBuf(type).get()->getSize() * sizeof(real);
94
    }
95 96
    return new_params;
  }
97

98 99 100
  void releaseNewParameter(paddle_parameter** newParams) {
    if (newParams != nullptr) {
      for (int i = 0; i < parameterSize(); ++i) {
101
        free(newParams[i]);
Q
qiaolongfei 已提交
102
      }
103
      free(newParams);
Q
qiaolongfei 已提交
104
    }
105
  }
106 107

protected:
W
wuyi05 已提交
108
  const OptimizationConfig& trainerConfig_;
109
  /// internal parameter client object for exchanging data with pserver
Q
qiaolongfei 已提交
110
  paddle_pserver_client parameterClient_;
111 112 113 114 115 116
  /// the parameters for new pserver client
  paddle_parameter** newParameters_;
  /// the gradinets for new pserver client
  paddle_parameter** newGradients_;
  /// the specification of parameter server "host1:port,host1:port"
  std::string pserverSpec_;
117 118
  /// true if pserverSpec_ is etcd endpoint, else pserverSpec_ is pserver addr
  bool useEtcd_;
119 120 121
};

}  // namespace paddle