// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/operators/distributed/request_handler_impl.h"
#include <iostream>
#include <string>
#include <vector>

#include "paddle/fluid/framework/data_type.h"
#include "paddle/fluid/framework/lod_tensor.h"
#include "paddle/fluid/framework/scope.h"
#include "paddle/fluid/framework/selected_rows.h"
#include "paddle/fluid/framework/variable_helper.h"
#include "paddle/fluid/operators/distributed/async_sparse_param_update_recorder.h"
#include "paddle/fluid/operators/distributed/rpc_server.h"
#include "paddle/fluid/string/piece.h"
#include "paddle/fluid/string/printf.h"

namespace paddle {
namespace operators {
namespace distributed {

// Define LOOKUP_TABLE_PATH so the checkpoint notify handler can save lookup
// table variables to the specified directory.
constexpr char LOOKUP_TABLE_PATH[] = "kLookupTablePath";

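// Handles variables sent from trainers. Barrier and complete messages only
// update the RPC server's state; for other variables, async mode records the
// updated sparse rows and runs the prepared optimization block immediately,
// while sync mode waits on the kRequestSend condition and checks that the
// variable exists on the server side.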
bool RequestSendHandler::Handle(const std::string& varname,
                                framework::Scope* scope,
                                framework::Variable* invar,
                                framework::Variable** outvar,
                                const int trainer_id,
                                const std::string& out_var_name,
                                const std::string& table_name) {
  VLOG(4) << "RequestSendHandler:" << varname;

  // Sync
  if (varname == BATCH_BARRIER_MESSAGE) {
    VLOG(3) << "sync: recv BATCH_BARRIER_MESSAGE";
    rpc_server_->IncreaseBatchBarrier(kRequestSend);
  } else if (varname == COMPLETE_MESSAGE) {
    VLOG(3) << "sync: recv complete message";
    rpc_server_->Complete();
  } else {
    // Async
    if (!sync_mode_) {
      VLOG(3) << "async process var: " << varname;
      if (varname == BATCH_BARRIER_MESSAGE) {
        PADDLE_THROW(
            "async mode should not recv BATCH_BARRIER_MESSAGE or "
            "COMPLETE_MESSAGE");
      }
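      // Record which rows of the sparse gradient arrived so that subsequent
      // GET requests can return only those updated rows of the parameter.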
      if (AsyncSparseParamUpdateRecorder::GetInstance()->HasGrad(varname)) {
        auto& grad_slr =
            scope->FindVar(varname)->Get<framework::SelectedRows>();
        AsyncSparseParamUpdateRecorder::GetInstance()->Update(varname,
                                                              grad_slr.rows());
      }
      executor_->RunPreparedContext((*grad_to_prepared_ctx_)[varname].get(),
                                    scope);
      return true;
    } else {  // sync
      rpc_server_->WaitCond(kRequestSend);
      VLOG(3) << "sync: processing received var: " << varname;

      if (invar == nullptr) {
        LOG(FATAL) << "sync: Can not find server side var: " << varname;
        return false;
      }
    }
  }
  return true;
}

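// Handles variables requested by trainers. In sync mode, fetch barrier
// messages advance the barrier and other requests wait on the kRequestGet
// condition; in async mode, the parameter may be backed up for DC-ASGD, and
// sparse parameters with recorded updates are returned as a SelectedRows
// holding only the updated rows.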
bool RequestGetHandler::Handle(const std::string& varname,
                               framework::Scope* scope,
                               framework::Variable* invar,
                               framework::Variable** outvar,
                               const int trainer_id,
                               const std::string& out_var_name,
                               const std::string& table_name) {
  VLOG(4) << "RequestGetHandler:" << varname
          << " out_var_name: " << out_var_name;

  if (sync_mode_) {
    if (varname == FETCH_BARRIER_MESSAGE) {
      VLOG(3) << "sync: recv fetch barrier message";
      rpc_server_->IncreaseBatchBarrier(kRequestGet);
    } else {
      rpc_server_->WaitCond(kRequestGet);
      *outvar = scope_->FindVar(varname);
    }
  } else {
    if (varname != FETCH_BARRIER_MESSAGE && varname != COMPLETE_MESSAGE) {
      if (enable_dc_asgd_) {
        // NOTE: the format is determined by distributed_transpiler.py
        std::string param_bak_name =
            string::Sprintf("%s.trainer_%d_bak", varname, trainer_id);
        VLOG(3) << "getting " << param_bak_name << " trainer_id " << trainer_id;
        auto var = scope_->FindVar(varname);
        auto t_orig = var->Get<framework::LoDTensor>();
        auto param_bak = scope_->Var(param_bak_name);
        auto t = param_bak->GetMutable<framework::LoDTensor>();
        t->mutable_data(dev_ctx_->GetPlace(), t_orig.type());
        VLOG(3) << "copying " << varname << " to " << param_bak_name;
        framework::TensorCopy(t_orig, dev_ctx_->GetPlace(), t);
      }
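      // For sparse parameters tracked by AsyncSparseParamUpdateRecorder, send
      // back only the rows updated since this trainer's last fetch.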
      if (AsyncSparseParamUpdateRecorder::GetInstance()->HasParam(varname)) {
        std::vector<int64_t> updated_rows;
        AsyncSparseParamUpdateRecorder::GetInstance()->GetAndClear(
            varname, trainer_id, &updated_rows);
        auto& origin_tensor =
            scope_->FindVar(varname)->Get<framework::LoDTensor>();
        auto* origin_tensor_data = origin_tensor.data<float>();
        auto& dims = origin_tensor.dims();
        *outvar = scope->Var();
        auto* out_slr = (*outvar)->GetMutable<framework::SelectedRows>();
        out_slr->set_rows(updated_rows);
        out_slr->set_height(dims[0]);
        auto out_dims = framework::make_ddim(
            {static_cast<int64_t>(updated_rows.size()), dims[1]});
        auto* data = out_slr->mutable_value()->mutable_data<float>(
            out_dims, origin_tensor.place());
        auto width = dims[1];
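        // Copy each updated row from the dense parameter tensor into the
        // SelectedRows value.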
        for (size_t i = 0; i < updated_rows.size(); ++i) {
          memcpy(data + i * width, origin_tensor_data + updated_rows[i] * width,
                 sizeof(float) * width);
        }
      } else {
        *outvar = scope_->FindVar(varname);
      }
    }
  }
  return true;
}

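// Fetches a variable from the server-side scope immediately, without any
// barrier. The requested name must contain the WITHOUT_BARRIER_MESSAGE
// suffix, which is trimmed before the variable is looked up.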
bool RequestGetNoBarrierHandler::Handle(const std::string& varname,
                                        framework::Scope* scope,
                                        framework::Variable* invar,
                                        framework::Variable** outvar,
                                        const int trainer_id,
                                        const std::string& out_var_name,
                                        const std::string& table_name) {
  VLOG(4) << "RequestGetNoBarrierHandler:" << varname
          << " out_var_name: " << out_var_name;

  // get var from pserver immediately without barriers
  string::Piece without_barrier_piece(WITHOUT_BARRIER_MESSAGE);
  string::Piece var_name_piece = string::Piece(varname);

  if (string::Contains(var_name_piece, without_barrier_piece)) {
    var_name_piece = string::TrimSuffix(var_name_piece, without_barrier_piece);
    VLOG(4) << "Get var " << var_name_piece << " with "
            << WITHOUT_BARRIER_MESSAGE;
    *outvar = scope_->FindVar(var_name_piece.ToString());
    return true;
  } else {
    PADDLE_THROW("GetNoBarrier must contain %s", WITHOUT_BARRIER_MESSAGE);
  }
  return true;
}

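// Handles prefetch requests for sparse parameter rows. When no table name is
// given, the variable's prepared prefetch program is run; otherwise a
// lookup_table op is built on the fly and run on CPU to fill the output.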
bool RequestPrefetchHandler::Handle(const std::string& varname,
                                    framework::Scope* scope,
                                    framework::Variable* invar,
                                    framework::Variable** outvar,
                                    const int trainer_id,
                                    const std::string& out_var_name,
                                    const std::string& table_name) {
  VLOG(4) << "RequestPrefetchHandler " << varname;

  if (table_name.empty()) {
    auto var_desc = program_->Block(0).FindVar(out_var_name);
    InitializeVariable(*outvar, var_desc->GetType());
    executor_->RunPreparedContext(
        (*prefetch_var_name_to_prepared_ctx_)[varname].get(), scope);
  } else {
    (*outvar)->GetMutable<framework::LoDTensor>();
    auto lookup_table_op =
        BuildLookupTableOp(table_name, varname, out_var_name);
    paddle::platform::CPUPlace cpu_place;
    lookup_table_op->Run(*scope, cpu_place);
  }
  return true;
}

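// Handles checkpoint notify requests: writes the save directory (passed in
// out_var_name) into the LOOKUP_TABLE_PATH variable and runs the prepared
// checkpoint program against the server-side scope to save the distributed
// lookup table.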
bool RequestCheckpointHandler::Handle(const std::string& varname,
                                      framework::Scope* scope,
                                      framework::Variable* invar,
                                      framework::Variable** outvar,
                                      const int trainer_id,
                                      const std::string& out_var_name,
                                      const std::string& table_name) {
  PADDLE_ENFORCE(
      checkpoint_notify_id != -1,
      "checkpoint_notify_id must not be -1 when handling a checkpoint RPC.");

  // TODO(tangwei12): find out why using the passed-in scope causes errors.
  auto* lt_var = scope_->FindVar(LOOKUP_TABLE_PATH)->GetMutable<std::string>();
  lt_var->clear();
  lt_var->append(out_var_name);
  VLOG(4) << "RequestCheckpointHandler update var kLookupTablePath to: "
          << out_var_name;
  executor_->RunPreparedContext(checkpoint_prepared_ctx_.get(), scope_);
  return true;
}

}  // namespace distributed
}  // namespace operators
}  // namespace paddle