//   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <set>
#include <string>
#include <vector>

#include "paddle/fluid/operators/distributed/parameter_recv.h"

#include "paddle/fluid/framework/lod_tensor.h"
#include "paddle/fluid/framework/scope.h"
#include "paddle/fluid/framework/selected_rows.h"
#include "paddle/fluid/framework/tensor.h"

#include "paddle/fluid/operators/distributed/distributed.h"
#include "paddle/fluid/operators/distributed/rpc_client.h"
#include "paddle/fluid/operators/distributed/variable_response.h"
#include "paddle/fluid/operators/distributed_ops/send_recv_util.h"
#include "paddle/fluid/operators/strided_memcpy.h"

namespace paddle {
namespace operators {
namespace distributed {

using LoDTensor = framework::LoDTensor;
using SelectedRows = framework::SelectedRows;
using DDim = framework::DDim;

template <typename T>
void ParameterRecv<T>::operator()(const RpcContext &rpc_ctx,
                                  const framework::ExecutionContext &ctx,
                                  const framework::Scope &scope) {
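  // Receive the split slices into a temporary scope so they do not pollute
  // the caller's scope; the temporary scope is deleted at the end.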
  framework::Scope *local_scope = scope.NewTmpScope();

  platform::DeviceContextPool &pool = platform::DeviceContextPool::Instance();
  auto &cpu_ctx = *pool.Get(platform::CPUPlace());

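  // RPC client used to pull the parameter slices from the parameter servers.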
  distributed::RPCClient *rpc_client =
      distributed::RPCClient::GetInstance<RPCCLIENT_T>(
          ctx.Attr<int>("trainer_id"));

  auto *recv_var = scope.FindVar(rpc_ctx.var_name);

  std::vector<framework::Tensor *> recved_tensors;

  // recv all vars to local scope
  if (recv_var->IsType<framework::LoDTensor>()) {
    std::vector<distributed::VarHandlePtr> rets;
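    // Issue one asynchronous get per split variable; the i-th slice is
    // fetched from the endpoint at the same index in rpc_ctx.epmap.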
    for (size_t i = 0; i < rpc_ctx.splited_var_names.size(); i++) {
      auto &recv_var_name = rpc_ctx.splited_var_names[i];
      framework::Tensor *t =
          local_scope->Var(recv_var_name)->GetMutable<framework::LoDTensor>();
      recved_tensors.push_back(t);
      VLOG(3) << "recv " << recv_var_name << " from " << rpc_ctx.epmap[i];
      rets.push_back(rpc_client->AsyncGetVar(rpc_ctx.epmap[i], cpu_ctx,
                                             *local_scope, recv_var_name,
                                             recv_var_name));
    }
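    // Block until every pending get has finished; a failed RPC aborts
    // via PADDLE_ENFORCE.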
    for (size_t i = 0; i < rets.size(); i++) {
      PADDLE_ENFORCE(rets[i]->Wait(), "internal error in RPCClient");
    }
  } else {
    PADDLE_THROW("unsupported var type to recv!");
  }

  // concat the received tensors back into one var
  {
    size_t output_offset = 0;
    framework::Tensor *recv_tensor =
        recv_var->GetMutable<framework::LoDTensor>();
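    // Concatenate along axis 0: each slice is copied at the running element
    // offset, which advances by in_stride[0] (the slice's element count).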
    for (auto *in : recved_tensors) {
      auto in_stride = framework::stride_numel(in->dims());
      auto out_stride = framework::stride_numel(recv_tensor->dims());
      StridedNumelCopyWithAxis<T>(
          ctx.device_context(), 0, recv_tensor->data<T>() + output_offset,
          out_stride, in->data<T>(), in_stride, in_stride[0]);
      output_offset += in_stride[0];
    }
  }

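  // The temporary scope (and the received slices it owns) is no longer needed.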
  delete local_scope;
}

template struct ParameterRecv<float>;

}  // namespace distributed
}  // namespace operators
}  // namespace paddle