recv_op.cc

/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <stdint.h>
#include <sys/stat.h>
#include <ostream>
#include <thread>

#include <unistd.h>

#include "paddle/framework/executor.h"
#include "paddle/framework/framework.pb.h"
#include "paddle/framework/lod_tensor.h"
#include "paddle/framework/op_registry.h"
#include "paddle/framework/proto_desc.h"
#include "paddle/operators/detail/grpc_server.h"
#include "paddle/operators/detail/sendrecvop_utils.h"
#include "paddle/operators/detail/simple_block_queue.h"
#include "paddle/string/printf.h"

#define LISTEN_TERMINATE_MESSAGE "TERMINATE@RECV"

namespace paddle {
namespace operators {

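// Condition values handed to AsyncGRPCServer::SetCond(): kCondStart while the
// server is collecting gradients, kCondRunning once the optimize program has
// been run for the current round.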
constexpr int kCondStart = 0;
constexpr int kCondRunning = 1;
constexpr int kCondDone = 2;

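// Entry point of the dedicated server thread: runs the gRPC service loop
// until ShutDown() is called from Stop().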
void RunServer(std::shared_ptr<detail::AsyncGRPCServer> service) {
  service->RunSyncUpdate();
  VLOG(4) << "RunServer thread end";
}

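// Create an empty server-side variable of the type announced in the received
// message (LoDTensor or SelectedRows).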
static void CreateTensorFromMessageType(framework::Variable *var,
                                        sendrecv::VarType var_type) {
  if (var_type == sendrecv::VarType::LOD_TENSOR) {
    var->GetMutable<framework::LoDTensor>();
  } else if (var_type == sendrecv::VarType::SELECTED_ROWS) {
    var->GetMutable<framework::SelectedRows>();
  } else {
    PADDLE_THROW(
        "VraibleMessage type %d is not in "
        "[LoDTensor, SelectedRows]",
        var_type);
  }
}

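// RecvOp is the parameter-server side of the send/recv pair: it starts an
// AsyncGRPCServer on `endpoint`, gathers gradients from all trainers, runs the
// serialized OptimizeProgram over them, and then lets trainers fetch the
// updated parameters.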
class RecvOp : public framework::OperatorBase {
 public:
  RecvOp(const std::string &type, const framework::VariableNameMap &inputs,
         const framework::VariableNameMap &outputs,
         const framework::AttributeMap &attrs)
      : OperatorBase(type, inputs, outputs, attrs) {
    if (!rpc_service_) {
      std::string endpoint = Attr<std::string>("endpoint");
      rpc_service_.reset(new detail::AsyncGRPCServer(endpoint));
      server_thread_.reset(new std::thread(RunServer, rpc_service_));
    }
  }

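  // Unblock the pending Get() in Run() with a terminate message, then shut
  // down the gRPC server and join the server thread.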
  void Stop() override {
    detail::MessageWithName term_msg;
    term_msg.first = LISTEN_TERMINATE_MESSAGE;
    rpc_service_->Push(term_msg);
    rpc_service_->ShutDown();
    server_thread_->join();
  }

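  // When the same gradient arrives from several trainers, keep each copy
  // under a per-trainer name ("<grad_name>.trainer_0", "<grad_name>.trainer_1",
  // ...) so that later copies do not overwrite earlier ones.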
  std::string GetGradVarNameForTrainer(const std::string &varname) const {
    if (grads_counter_.find(varname) == grads_counter_.end()) {
      grads_counter_[varname] = 0;
    }
    return string::Sprintf("%s.trainer_%d", varname, grads_counter_[varname]++);
  }

  void Run(const framework::Scope &scope,
           const platform::Place &dev_place) const override {
    platform::DeviceContextPool &pool = platform::DeviceContextPool::Instance();
    auto &dev_ctx = *pool.Get(dev_place);
    framework::Scope &recv_scope = scope.NewScope();

    // FIXME(Yancey1989): initialize rpc server with lazy mode.
    rpc_service_->SetScope(&recv_scope);
    rpc_service_->SetDevCtx(&dev_ctx);
    auto param_list = Attr<std::vector<std::string>>("ParamList");
    auto grad_list = Attr<std::vector<std::string>>("GradList");
    auto fan_in = Attr<int>("Fanin");
    size_t param_count = param_list.size();

    std::string program_str = Attr<std::string>("OptimizeProgram");
    framework::proto::ProgramDesc program_desc;
    program_desc.ParseFromString(program_str);
    framework::ProgramDesc program(program_desc);
    framework::Executor executor(dev_place);

    // TODO(typhoonzero): change this to a while_op for every cluster-batch.
    bool exit_flag = false;
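    // One update round expects every trainer (fan_in) to send every gradient
    // (param_count), i.e. param_count * fan_in messages in total.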
    size_t barrier_size = param_count * fan_in;
    while (!exit_flag) {
      // Get gradients from multiple trainers; we don't care about the order
      // in which they arrive, just add suffix 0~n and merge the gradients.
      rpc_service_->SetCond(kCondStart);
      for (size_t i = 0; i < barrier_size; ++i) {
        const detail::MessageWithName &v = rpc_service_->Get();
        auto grad_var_name = v.first;
        if (grad_var_name == LISTEN_TERMINATE_MESSAGE) {
          LOG(INFO) << "received terminate message and exit";
          exit_flag = true;
          break;
        }
        auto it = std::find(grad_list.begin(), grad_list.end(), grad_var_name);
        std::string param_var_name;
        if (it != grad_list.end()) {
          param_var_name = param_list[it - grad_list.begin()];
        } else {
          LOG(ERROR) << "grad have no paired param:" << grad_var_name;
        }
        VLOG(3) << "recved grad: " << grad_var_name
                << " updating param: " << param_var_name;
        if (fan_in > 1) {
          grad_var_name = this->GetGradVarNameForTrainer(grad_var_name);
        }
        auto *var = recv_scope.FindVar(grad_var_name);
        if (var == nullptr) {
          LOG(ERROR) << "can not find server side var: " << grad_var_name;
          PADDLE_THROW("can not find server side var");
        }
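        // Copy the received tensor data into the (possibly per-trainer
        // renamed) server-side variable.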
        detail::DeserializeFromMessage(v.second, dev_ctx, var);
      }
      if (exit_flag) {
        break;
      }
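      // All gradients for this round have arrived; run the optimize program
      // once over recv_scope.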
      try {
        executor.Run(program, &recv_scope, 0, /*global_block*/
                     false /*create_local_scope*/, false /*create_vars*/);
      } catch (std::exception &e) {
        LOG(ERROR) << "run sub program error " << e.what();
      }
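      // Let trainers fetch the updated parameters, and wait until every
      // expected Get request has been served before starting the next round.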
      rpc_service_->SetCond(kCondRunning);
      rpc_service_->WaitClientGet(barrier_size);
      grads_counter_.clear();
    }  // while(!exit_flag)
  }

 protected:
  std::shared_ptr<detail::AsyncGRPCServer> rpc_service_;
  std::shared_ptr<std::thread> server_thread_;
  mutable std::unordered_map<std::string, int> grads_counter_;
};

class RecvOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  RecvOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("RX", "(Tensor) Input tensor to be optimized").AsDuplicable();
    AddComment(R"DOC(
Recv operator

This operator will receive tensors from the send_op.
)DOC");
    AddAttr<std::string>("endpoint",
                         "(string, default 127.0.0.1:6164)"
                         "IP address to listen on.")
        .SetDefault("127.0.0.1:6164")
        .AddCustomChecker([](const std::string &ip) { return !ip.empty(); });
    AddAttr<std::string>("OptimizeProgram", "type string",
                         "Serialized ProgramDesc string for recv to run.");
    AddAttr<std::vector<std::string>>(
        "ParamList", "type list of string",
        "grad->param name mapping to find which param to optimize.")
        .SetDefault({});
    AddAttr<std::vector<std::string>>(
        "GradList", "type list of string",
        "grad->param name mapping to find which param to optimize.")
        .SetDefault({});
    AddAttr<int>("Fanin", "type int",
                 "Number of trainers in the current cluster job")
        .SetDefault(1);
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

REGISTER_OPERATOR(recv, ops::RecvOp, ops::RecvOpMaker);