/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <unistd.h>
#include <string>
#include <thread>  // NOLINT
#include <unordered_map>

#include "gtest/gtest.h"
#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/executor.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/operator.h"

#include "paddle/fluid/operators/detail/macros.h"
#include "paddle/fluid/operators/distributed/request_handler_impl.h"
#include "paddle/fluid/operators/distributed/rpc_client.h"
#include "paddle/fluid/operators/distributed/rpc_server.h"
#include "paddle/fluid/string/printf.h"

namespace framework = paddle::framework;
namespace platform = paddle::platform;
namespace distributed = paddle::operators::distributed;

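// Link in the lookup_sparse_table operator, which is registered without a kernel.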
USE_NO_KERNEL_OP(lookup_sparse_table);

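// The server and its request handler are shared between the test body and the
// server thread through these globals.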
std::unique_ptr<distributed::RPCServer> g_rpc_service;
std::unique_ptr<distributed::RequestHandler> g_req_handler;

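// Append a sub-block holding a single lookup_sparse_table op ("w", "ids" -> "out");
// the server executes this block to answer prefetch requests.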
framework::BlockDesc* AppendPrefetchBlock(framework::ProgramDesc* program) {
  auto root_block = program->MutableBlock(0);
  auto* block = program->AppendBlock(*root_block);

  auto op = block->AppendOp();
  op->SetType("lookup_sparse_table");
  op->SetInput("W", {"w"});
  op->SetInput("Ids", {"ids"});
  op->SetOutput("Out", {"out"});

  auto& out = *root_block->Var("out");
  out.SetType(framework::proto::VarType::LOD_TENSOR);
  out.SetShape({10, 10});

  return block;
}

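// Create the variables used on both sides: the SelectedRows table "w" and the
// LoDTensors "ids" and "out".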
void CreateVarsOnScope(framework::Scope* scope, platform::CPUPlace* place) {
  auto w_var = scope->Var("w");
  w_var->GetMutable<framework::SelectedRows>();

  auto out_var = scope->Var("out");
  out_var->GetMutable<framework::LoDTensor>();

  auto ids_var = scope->Var("ids");
  ids_var->GetMutable<framework::LoDTensor>();
}

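// Client side: fill "ids" with the even row indices 0, 2, ..., 2 * (rows_numel - 1)
// that will be looked up on the server.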
void InitTensorsOnClient(framework::Scope* scope, platform::CPUPlace* place,
                         int64_t rows_numel) {
  CreateVarsOnScope(scope, place);
  auto ids_var = scope->Var("ids")->GetMutable<framework::LoDTensor>();
  int64_t* ids_ptr =
      ids_var->mutable_data<int64_t>(framework::DDim({rows_numel, 1}), *place);
  for (int64_t i = 0; i < rows_numel; ++i) ids_ptr[i] = i * 2;
}

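// Server side: build a rows_numel x 10 SelectedRows table "w" whose i-th row is
// filled with the value i.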
void InitTensorsOnServer(framework::Scope* scope, platform::CPUPlace* place,
                         int64_t rows_numel) {
  CreateVarsOnScope(scope, place);
  auto w = scope->Var("w")->GetMutable<framework::SelectedRows>();
  auto rows = w->mutable_rows();
  for (int64_t i = 0; i < rows_numel; ++i) rows->push_back(i);
  auto w_value = w->mutable_value();
  w_value->Resize({rows_numel, 10});

  auto ptr = w_value->mutable_data<float>(*place);

  for (int64_t i = 0; i < w_value->numel(); ++i) {
    ptr[i] = static_cast<float>(i / 10);
  }
}

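// Serve rpc_name on g_rpc_service: prepare the prefetch block, wire up
// g_req_handler, and block in StartServer() until the test calls ShutDown().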
void StartServer(const std::string& rpc_name) {
  framework::ProgramDesc program;
  framework::Scope scope;
  platform::CPUPlace place;
  framework::Executor exe(place);
  platform::CPUDeviceContext ctx(place);
  auto* block = AppendPrefetchBlock(&program);
  std::string in_var_name("ids");
  std::vector<int> prefetch_block_ids{block->ID()};
  auto prepared = exe.Prepare(program, prefetch_block_ids);
  InitTensorsOnServer(&scope, &place, 10);

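  // The handler selects a prepared sub-block by the incoming variable name, so
  // map "ids" to the prefetch block prepared above.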
  std::unordered_map<std::string,
                     std::shared_ptr<framework::ExecutorPrepareContext>>
      prefetch_var_name_to_prepared;
  prefetch_var_name_to_prepared[in_var_name] = prepared[0];

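  // Hand the handler the program, prepared contexts, device context, scope, and
  // executor it needs to run the prefetch block.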
  g_req_handler->SetProgram(&program);
  g_req_handler->SetPrefetchPreparedCtx(&prefetch_var_name_to_prepared);
  g_req_handler->SetDevCtx(&ctx);
  g_req_handler->SetScope(&scope);
  g_req_handler->SetExecutor(&exe);

  g_rpc_service->RegisterRPC(rpc_name, g_req_handler.get());
  g_req_handler->SetRPCServer(g_rpc_service.get());

  std::thread server_thread(
      std::bind(&distributed::RPCServer::StartServer, g_rpc_service.get()));

  server_thread.join();
}

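// End-to-end prefetch: a client sends the "ids" tensor to the server and
// receives the matching rows of "w" back in "out".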
TEST(PREFETCH, CPU) {
  g_req_handler.reset(new distributed::RequestPrefetchHandler(true));
  g_rpc_service.reset(new RPCSERVER_T("127.0.0.1:0", 1));
  distributed::RPCClient* client =
      distributed::RPCClient::GetInstance<RPCCLIENT_T>();

  std::thread server_thread(StartServer, distributed::kRequestPrefetch);
  g_rpc_service->WaitServerReady();

  int port = g_rpc_service->GetSelectedPort();
  std::string ep = paddle::string::Sprintf("127.0.0.1:%d", port);

  framework::Scope scope;
  platform::CPUPlace place;
  platform::CPUDeviceContext ctx(place);
  {
    // create var on local scope
    int64_t rows_numel = 5;
    InitTensorsOnClient(&scope, &place, rows_numel);
    std::string in_var_name("ids");
    std::string out_var_name("out");

    client->AsyncPrefetchVar(ep, ctx, scope, in_var_name, out_var_name);
    client->Wait();
    auto var = scope.Var(out_var_name);
    auto value = var->GetMutable<framework::LoDTensor>();
    auto ptr = value->mutable_data<float>(place);

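    // Row i of the prefetched tensor corresponds to table row ids[i] = i * 2,
    // and InitTensorsOnServer filled every entry of that row with its row index.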
    for (int64_t i = 0; i < rows_numel; ++i) {
      EXPECT_EQ(ptr[0 + i * value->dims()[1]], static_cast<float>(i * 2));
    }
  }

  g_rpc_service->ShutDown();
  server_thread.join();
  LOG(INFO) << "begin reset";
  g_rpc_service.reset(nullptr);
  g_req_handler.reset(nullptr);
}

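// After a client reports completion via AsyncSendComplete, the server should
// count exactly one connected client.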
TEST(COMPLETE, CPU) {
  g_req_handler.reset(new distributed::RequestSendHandler(true));
  g_rpc_service.reset(new RPCSERVER_T("127.0.0.1:0", 2));
  distributed::RPCClient* client =
      distributed::RPCClient::GetInstance<RPCCLIENT_T>();
  PADDLE_ENFORCE(client != nullptr);
  std::thread server_thread(StartServer, distributed::kRequestSend);
  g_rpc_service->WaitServerReady();
  int port = g_rpc_service->GetSelectedPort();
  std::string ep = paddle::string::Sprintf("127.0.0.1:%d", port);
  client->AsyncSendComplete(ep);
  client->Wait();

  EXPECT_EQ(g_rpc_service->GetClientNum(), 1);

  g_rpc_service->ShutDown();
  server_thread.join();
  g_rpc_service.reset(nullptr);
  g_req_handler.reset(nullptr);
}