/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <unistd.h>
#include <functional>
#include <memory>
#include <string>
#include <thread>  // NOLINT
#include <unordered_map>
#include <vector>

#include "gtest/gtest.h"
#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/operator.h"

#include "paddle/fluid/operators/detail/macros.h"
#include "paddle/fluid/operators/distributed/request_handler_impl.h"
#include "paddle/fluid/operators/distributed/rpc_client.h"
#include "paddle/fluid/operators/distributed/rpc_server.h"

namespace framework = paddle::framework;
namespace platform = paddle::platform;
namespace distributed = paddle::operators::distributed;

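// Make sure the statically registered lookup_table op and its CPU kernel are
// linked into this test binary.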
USE_OP(lookup_table);

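// Shared between the test body and the server thread spawned in each test.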
std::unique_ptr<distributed::RPCServer> g_rpc_service;
std::unique_ptr<distributed::RequestHandler> g_req_handler;

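// Appends a sub-block holding a single lookup_table op to the program. The
// server executes this block to answer prefetch requests, gathering the rows
// of "w" selected by "ids" into the SelectedRows variable "out".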
framework::BlockDesc* AppendPrefetchBlock(framework::ProgramDesc* program) {
  auto root_block = program->MutableBlock(0);
  auto* block = program->AppendBlock(*root_block);

  auto op = block->AppendOp();
  op->SetType("lookup_table");
  op->SetInput("W", {"w"});
  op->SetInput("Ids", {"ids"});
  op->SetOutput("Out", {"out"});

  auto& out = *root_block->Var("out");
  out.SetType(framework::proto::VarType::SELECTED_ROWS);
  out.SetShape({10, 10});

  return block;
}

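// Creates the SelectedRows variables "w", "out" and "ids" that both the
// client and the server sides of the test operate on.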
void CreateVarsOnScope(framework::Scope* scope, platform::CPUPlace* place) {
  auto w_var = scope->Var("w");
  w_var->GetMutable<framework::SelectedRows>();

  auto out_var = scope->Var("out");
  out_var->GetMutable<framework::SelectedRows>();

  auto ids_var = scope->Var("ids");
  ids_var->GetMutable<framework::SelectedRows>();
}

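// On the client, fills "ids" with the even row ids {0, 2, 4, ...} that the
// prefetch request will look up in the server-side table.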
void InitTensorsOnClient(framework::Scope* scope, platform::CPUPlace* place,
                         int64_t rows_numel) {
  CreateVarsOnScope(scope, place);
  auto ids_var = scope->Var("ids")->GetMutable<framework::SelectedRows>();
  auto rows = ids_var->mutable_rows();
  for (int64_t i = 0; i < rows_numel; ++i) rows->push_back(i * 2);
  ids_var->mutable_value()->Resize({rows_numel, 1});
  ids_var->mutable_value()->mutable_data<float>(*place);
}

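// On the server, builds the rows_numel x 10 table "w" in which every element
// of row r equals r (integer division i / 10), the value the client checks.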
void InitTensorsOnServer(framework::Scope* scope, platform::CPUPlace* place,
                         int64_t rows_numel) {
  CreateVarsOnScope(scope, place);
  auto w = scope->Var("w")->GetMutable<framework::SelectedRows>();
  auto rows = w->mutable_rows();
  for (int64_t i = 0; i < rows_numel; ++i) rows->push_back(i);
  auto w_value = w->mutable_value();
  w_value->Resize({rows_numel, 10});

  auto ptr = w_value->mutable_data<float>(*place);

  for (int64_t i = 0; i < w_value->numel(); ++i) {
    ptr[i] = static_cast<float>(i / 10);
  }
}

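// Builds the prefetch program, prepares it with an executor, wires the scope,
// device context and executor into the global request handler, then blocks in
// the RPC server loop until ShutDown() is called from the test body.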
void StartServer(const std::string& rpc_name) {
  framework::ProgramDesc program;
  framework::Scope scope;
  platform::CPUPlace place;
  framework::Executor exe(place);
  platform::CPUDeviceContext ctx(place);
  auto* block = AppendPrefetchBlock(&program);
  std::string in_var_name("ids");
  std::vector<int> prefetch_block_ids{block->ID()};
  auto prepared = exe.Prepare(program, prefetch_block_ids);
  InitTensorsOnServer(&scope, &place, 10);

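  // Map the prefetch input variable name to its prepared sub-block context so
  // the request handler knows which block to run for that variable.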
  std::unordered_map<std::string,
                     std::shared_ptr<framework::ExecutorPrepareContext>>
      prefetch_var_name_to_prepared;
  prefetch_var_name_to_prepared[in_var_name] = prepared[0];

  g_req_handler->SetProgram(&program);
  g_req_handler->SetPrefetchPreparedCtx(&prefetch_var_name_to_prepared);
  g_req_handler->SetDevCtx(&ctx);
  g_req_handler->SetScope(&scope);
  g_req_handler->SetExecutor(&exe);

  g_rpc_service->RegisterRPC(rpc_name, g_req_handler.get());
  g_req_handler->SetRPCServer(g_rpc_service.get());

  std::thread server_thread(
      std::bind(&distributed::RPCServer::StartServer, g_rpc_service.get()));

  server_thread.join();
}

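// End-to-end prefetch test: a background thread serves lookup_table results
// over RPC while the main thread acts as the client and checks the rows that
// come back.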
TEST(PREFETCH, CPU) {
  g_req_handler.reset(new distributed::RequestPrefetchHandler(true));
  g_rpc_service.reset(new RPCSERVER_T("127.0.0.1:0", 1));
  distributed::RPCClient* client =
      distributed::RPCClient::GetInstance<RPCCLIENT_T>();

  std::thread server_thread(StartServer, distributed::kRequestPrefetch);
  g_rpc_service->WaitServerReady();

  // The server was bound to port 0, so query the port the OS actually chose.
  int port = g_rpc_service->GetSelectedPort();
  std::string ep = paddle::string::Sprintf("127.0.0.1:%d", port);

  framework::Scope scope;
  platform::CPUPlace place;
  platform::CPUDeviceContext ctx(place);
  {
    // create var on local scope
    int64_t rows_numel = 5;
    InitTensorsOnClient(&scope, &place, rows_numel);
    std::string in_var_name("ids");
    std::string out_var_name("out");

    client->AsyncPrefetchVar(ep, ctx, scope, in_var_name, out_var_name);
    client->Wait();
    auto var = scope.Var(out_var_name);
    auto value = var->GetMutable<framework::SelectedRows>()->value();
    auto ptr = value.mutable_data<float>(place);

    // Row i of "out" should equal server row ids[i] == i * 2, and every
    // element of a server row was initialized to its row id, hence i * 2.
    for (int64_t i = 0; i < rows_numel; ++i) {
      EXPECT_EQ(ptr[0 + i * value.dims()[1]], static_cast<float>(i * 2));
    }
  }

  g_rpc_service->ShutDown();
  server_thread.join();
  LOG(INFO) << "begin reset";
  g_rpc_service.reset(nullptr);
  g_req_handler.reset(nullptr);
}

// The server is created expecting two clients; after this client sends the
// complete signal, the number of clients still connected should drop to one.
TEST(COMPLETE, CPU) {
  g_req_handler.reset(new distributed::RequestSendHandler(true));
  g_rpc_service.reset(new RPCSERVER_T("127.0.0.1:0", 2));
  distributed::RPCClient* client =
      distributed::RPCClient::GetInstance<RPCCLIENT_T>();
  PADDLE_ENFORCE(client != nullptr);
  std::thread server_thread(StartServer, distributed::kRequestSend);
  g_rpc_service->WaitServerReady();
  int port = g_rpc_service->GetSelectedPort();
  std::string ep = paddle::string::Sprintf("127.0.0.1:%d", port);
  client->AsyncSendComplete(ep);
  client->Wait();

  EXPECT_EQ(g_rpc_service->GetClientNum(), 1);

  g_rpc_service->ShutDown();
  server_thread.join();
  g_rpc_service.reset(nullptr);
  g_req_handler.reset(nullptr);
}