/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <stdint.h>
#include <sys/stat.h>
#include <ostream>
#include <thread>

#include <unistd.h>

#include "paddle/framework/executor.h"
#include "paddle/framework/framework.pb.h"
#include "paddle/framework/lod_tensor.h"
#include "paddle/framework/op_registry.h"
#include "paddle/framework/proto_desc.h"
#include "paddle/operators/detail/grpc_server.h"
#include "paddle/operators/detail/sendrecvop_utils.h"
#include "paddle/operators/detail/simple_block_queue.h"
#include "paddle/string/printf.h"

#define LISTEN_TERMINATE_MESSAGE "TERMINATE@RECV"

namespace paddle {
namespace operators {

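// Entry point for the background server thread: drives the gRPC service loop
// until the service is shut down from RecvOp::Stop().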
void RunServer(std::shared_ptr<detail::AsyncGRPCServer> service) {
  service->RunSyncUpdate();
  VLOG(4) << "RunServer thread end";
}

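// Allocate the variable holder matching the wire type of an incoming message:
// trainers may send either a LoDTensor or a SelectedRows.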
static void CreateTensorFromMessageType(framework::Variable *var,
                                        sendrecv::VarType var_type) {
  if (var_type == sendrecv::VarType::LOD_TENSOR) {
    var->GetMutable<framework::LoDTensor>();
  } else if (var_type == sendrecv::VarType::SELECTED_ROWS) {
    var->GetMutable<framework::SelectedRows>();
  } else {
    PADDLE_THROW(
        "VraibleMessage type %d is not in "
        "[LoDTensor, SelectedRows]",
        var_type);
  }
}

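// RecvOp implements the parameter-server side of distributed training: its
// constructor starts a gRPC service on "endpoint", and Run() repeatedly
// collects gradients from trainers and runs the optimize sub-program on them.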
class RecvOp : public framework::OperatorBase {
 public:
  RecvOp(const std::string &type, const framework::VariableNameMap &inputs,
         const framework::VariableNameMap &outputs,
         const framework::AttributeMap &attrs)
      : OperatorBase(type, inputs, outputs, attrs) {
    if (!rpc_service_) {
      std::string endpoint = Attr<std::string>("endpoint");
      rpc_service_.reset(new detail::AsyncGRPCServer(endpoint));
      server_thread_.reset(new std::thread(RunServer, rpc_service_));
    }
  }

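  // Unblock the receive loop with a terminate message, then shut down the
  // gRPC service and join the server thread.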
  void Stop() override {
    detail::MessageWithName term_msg;
    term_msg.first = LISTEN_TERMINATE_MESSAGE;
    rpc_service_->Push(term_msg);
    rpc_service_->ShutDown();
    server_thread_->join();
  }

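  // With more than one trainer, each received copy of a gradient is kept under
  // a per-trainer suffix (a grad named "g" is stored as "g.trainer_0",
  // "g.trainer_1", ...) so the copies stay separate until they are merged.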
  std::string GetGradVarNameForTrainer(const std::string &varname) const {
    if (grads_counter_.find(varname) == grads_counter_.end()) {
      grads_counter_[varname] = 0;
    }
    return string::Sprintf("%s.trainer_%d", varname, grads_counter_[varname]++);
  }

  void Run(const framework::Scope &scope,
           const platform::Place &dev_place) const override {
    platform::DeviceContextPool &pool = platform::DeviceContextPool::Instance();
    auto &dev_ctx = *pool.Get(dev_place);
    framework::Scope &recv_scope = scope.NewScope();
    rpc_service_->SetScope(&recv_scope);
    auto param_list = Attr<std::vector<std::string>>("ParamList");
    auto grad_list = Attr<std::vector<std::string>>("GradList");
    auto fan_in = Attr<int>("Fanin");
    size_t param_count = param_list.size();

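    // Rebuild the optimize sub-program from its serialized ProgramDesc
    // attribute; it is run once per round of the receive loop below.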
    std::string program_str = Attr<std::string>("OptimizeProgram");
    framework::proto::ProgramDesc program_desc;
    program_desc.ParseFromString(program_str);
    framework::ProgramDesc program(program_desc);
    framework::Executor executor(dev_place);

    rpc_service_->Reset();
    // TODO(typhoonzero): change this to a while_op for every cluster-batch.
    bool exit_flag = false;
    while (!exit_flag) {
      // Get gradients from multiple trainers. We don't care about the order
      // in which they arrive; just add suffix 0~n and merge the gradients.
      for (size_t i = 0; i < param_count * fan_in; ++i) {
        const detail::MessageWithName &v = rpc_service_->Get();
        auto grad_var_name = v.first;
        if (grad_var_name == LISTEN_TERMINATE_MESSAGE) {
          LOG(INFO) << "received terminate message and exit";
          exit_flag = true;
          break;
        }
        auto it = std::find(grad_list.begin(), grad_list.end(), grad_var_name);
        std::string param_var_name;
        if (it != grad_list.end()) {
          param_var_name = param_list[it - grad_list.begin()];
        } else {
          LOG(ERROR) << "grad has no paired param: " << grad_var_name;
        }
        VLOG(3) << "received grad: " << grad_var_name
                << " updating param: " << param_var_name;
        // Assume grad_var_name must appear in the global scope. With a single
        // trainer the plain name is used; with fan_in > 1 each trainer's copy
        // arrives under a per-trainer suffixed name.
        std::string grad_var_name_trainer = grad_var_name;
        if (fan_in > 1) {
          grad_var_name_trainer = this->GetGradVarNameForTrainer(grad_var_name);
        }
        auto *var = recv_scope.FindVar(grad_var_name_trainer);
        if (var == nullptr) {
          LOG(ERROR) << "cannot find server side var: "
                     << grad_var_name_trainer;
          PADDLE_THROW("cannot find server side var");
        }
        detail::DeserializeFromMessage(v.second, dev_ctx, var);
      }
      if (exit_flag) {
        break;
      }
      rpc_service_->Reset();
      try {
        executor.Run(program, &recv_scope, 0 /*global_block*/,
                     false /*create_local_scope*/, false /*create_vars*/);
      } catch (std::exception &e) {
        LOG(ERROR) << "run sub program error " << e.what();
      }

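      // Mark this round of updates as finished and reset the per-variable
      // counters so the next round starts suffixing from trainer_0 again.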
      rpc_service_->Done();
      grads_counter_.clear();
    }  // while (!exit_flag)
  }

 protected:
  std::shared_ptr<detail::AsyncGRPCServer> rpc_service_;
  std::shared_ptr<std::thread> server_thread_;
  mutable std::unordered_map<std::string, int> grads_counter_;
};

class RecvOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  RecvOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("RX", "(Tensor) Input tensor to be optimized").AsDuplicable();
    AddComment(R"DOC(
Recv operator

This operator will receive tensors from send_op.
)DOC");
    AddAttr<std::string>("endpoint",
                         "(string, default 127.0.0.1:6164)"
                         "IP address to listen on.")
        .SetDefault("127.0.0.1:6164")
        .AddCustomChecker([](const std::string &ip) { return !ip.empty(); });
    AddAttr<std::string>("OptimizeProgram",
                         "Serialized ProgramDesc string for recv to run.");
    AddAttr<std::vector<std::string>>(
        "ParamList", "type list of string",
        "grad->param name mapping to find which param to optimize.")
        .SetDefault({});
    AddAttr<std::vector<std::string>>(
        "GradList", "type list of string",
        "grad->param name mapping to find which param to optimize.")
        .SetDefault({});
    AddAttr<int>("Fanin", "type int",
                 "Number of trainers in the current cluster job")
        .SetDefault(1);
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

REGISTER_OPERATOR(recv, ops::RecvOp, ops::RecvOpMaker);