/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
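
// Integration test for the send / listen_and_serv operator pair: a server
// thread runs listen_and_serv with a one-op "sum" sub-program, while the
// test body sends x1, receives Out, and checks the result for both the
// dense (LoDTensor) and the sparse (SelectedRows) path.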

#include <unistd.h>
#include <memory>
#include <string>
#include <thread>  // NOLINT

#include "gtest/gtest.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/operators/listen_and_serv_op.h"
#include "paddle/fluid/operators/math/math_function.h"
#include "paddle/fluid/operators/math/selected_rows_functor.h"
#include "paddle/fluid/string/printf.h"

USE_NO_KERNEL_OP(send);
USE_NO_KERNEL_OP(listen_and_serv);
USE_OP(sum);

namespace f = paddle::framework;
namespace p = paddle::platform;
namespace m = paddle::operators::math;

// Globals shared between the server thread and the test body, for simplicity.
std::unique_ptr<f::OperatorBase> listen_and_serv_op;
int selected_port;

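// Fills the scope with two 10x10 float LoDTensors, x0 and x1, both holding
// the values 0..99, plus an allocated but uninitialized 10x10 tensor "Out".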
void InitTensorsInScope(const p::CPUPlace &place, f::Scope *scope) {
  p::CPUDeviceContext ctx(place);
  for (int i = 0; i < 2; ++i) {
    auto var_name = paddle::string::Sprintf("x%d", i);
    auto var = scope->Var(var_name);
    auto tensor = var->GetMutable<f::LoDTensor>();
    tensor->Resize({10, 10});
    float *expect = tensor->mutable_data<float>(place);
    for (int64_t j = 0; j < tensor->numel(); ++j) {
      expect[j] = static_cast<float>(j);
    }
  }

  auto out_var = scope->Var("Out");
  auto out_tensor = out_var->GetMutable<f::LoDTensor>();
  out_tensor->Resize({10, 10});
  out_tensor->mutable_data<float>(place);  // allocate
}

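// Fills the scope with two SelectedRows of height 10: x0 (rows {0, 4, 7})
// and x1 (rows {2, 9}), all values set to 1.0, plus an output SelectedRows
// "Out" pre-sized to 5x10, the combined row count of x0 and x1.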
void InitSelectedRowsInScope(const p::CPUPlace &place, f::Scope *scope) {
  p::CPUDeviceContext ctx(place);
  int64_t height = 10;
  int64_t row_numel = 10;
  m::SetConstant<p::CPUDeviceContext, float> set_one;
  // init x0
  std::vector<int64_t> rows0{0, 4, 7};
  auto x0_var = scope->Var("x0");
  auto x0 = x0_var->GetMutable<f::SelectedRows>();
  x0->set_rows(rows0);
  x0->set_height(height);
  auto x0_value = x0->mutable_value();
  x0_value->mutable_data<float>(
      f::make_ddim({static_cast<int64_t>(rows0.size()), row_numel}), place);
  set_one(ctx, x0_value, 1.0);

  // init x1
  std::vector<int64_t> rows1{2, 9};
  auto x1_var = scope->Var("x1");
  auto x1 = x1_var->GetMutable<f::SelectedRows>();
  x1->set_rows(rows1);
  x1->set_height(height);
  auto x1_value = x1->mutable_value();
  x1_value->mutable_data<float>(
      f::make_ddim({static_cast<int64_t>(rows1.size()), row_numel}), place);
  set_one(ctx, x1_value, 1.0);

  auto out_var = scope->Var("Out");
  auto out = out_var->GetMutable<f::SelectedRows>();
  auto out_value = out->mutable_value();
  out->set_height(height);
  out_value->mutable_data<float>(f::make_ddim({5, 10}), place);
}

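// Appends an op of the given type to the block, first registering every
// output variable on the block with FP32 data type.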
void AddOp(const std::string &type, const f::VariableNameMap &inputs,
           const f::VariableNameMap &outputs, f::AttributeMap attrs,
           f::BlockDesc *block) {
  // insert output
  for (auto kv : outputs) {
    for (auto v : kv.second) {
      auto var = block->Var(v);
      var->SetDataType(f::proto::VarType::FP32);
    }
  }

  // insert op
  auto op = block->AppendOp();
  op->SetType(type);
  for (auto &kv : inputs) {
    op->SetInput(kv.first, kv.second);
  }
  for (auto &kv : outputs) {
    op->SetOutput(kv.first, kv.second);
  }
  op->SetAttrMap(attrs);
}

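// Blocks the calling thread: binds listen_and_serv to an OS-chosen port
// (endpoint 127.0.0.1:0) and serves the one-op "sum" sub-program until
// Stop() is called from the test body.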
void StartServerNet(bool is_sparse) {
  f::Scope scope;
  p::CPUPlace place;
  if (is_sparse) {
    InitSelectedRowsInScope(place, &scope);
  } else {
    InitTensorsInScope(place, &scope);
  }

  // The sub-program run inside listen_and_serv_op; for this simple test it
  // is just a sum op.
  f::ProgramDesc program;
  const auto &root_block = program.Block(0);
  auto *optimize_block = program.AppendBlock(root_block);
  auto *prefetch_block = program.AppendBlock(root_block);
  // X holds the server-side tensors and RX the received tensors; both must
  // have the same shape.
  AddOp("sum", {{"X", {"x0", "x1"}}}, {{"Out", {"Out"}}}, {}, optimize_block);

  f::AttributeMap attrs;
  attrs.insert({"endpoint", std::string("127.0.0.1:0")});
  attrs.insert({"Fanin", 1});
  attrs.insert({"ParamList", std::vector<std::string>({"Out"})});
  attrs.insert({"GradList", std::vector<std::string>({"x1"})});
  attrs.insert({"OptimizeBlock", optimize_block});
  attrs.insert({"PrefetchBlock", prefetch_block});
  attrs.insert({"grad_map", {}});
  attrs.insert({"sync_mode", true});
  listen_and_serv_op =
      f::OpRegistry::CreateOp("listen_and_serv", {{"X", {"x1"}}}, {}, attrs);
  listen_and_serv_op->Run(scope, place);
  LOG(INFO) << "server exit";
}

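// Dense path: the server sums two identical LoDTensors, so the "Out"
// tensor received back must equal 2 * x1 element-wise.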
TEST(SendRecvOp, CPUDense) {
  std::thread server_thread(StartServerNet, false);
  sleep(5);  // wait for the server to start
  // local net
  f::Scope scope;
  p::CPUPlace place;
  InitTensorsInScope(place, &scope);
  // create rpc client var
  scope.Var("RPC_CLIENT_VAR");

  f::AttributeMap attrs;
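  // The server bound to port 0, so query the running listen_and_serv op
  // for the port the OS actually picked before building the endpoint.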
  selected_port = static_cast<paddle::operators::ListenAndServOp *>(
                      listen_and_serv_op.get())
                      ->GetSelectedPort();
  std::string endpoint = paddle::string::Sprintf("127.0.0.1:%d", selected_port);
  attrs.insert({"endpoints", std::vector<std::string>({endpoint})});
  attrs.insert({"epmap", std::vector<std::string>({endpoint})});
  auto send_op = f::OpRegistry::CreateOp(
      "send", {{"X", {"x1"}}},
      {{"Out", {"Out"}}, {"RPCClient", {"RPC_CLIENT_VAR"}}}, attrs);
  send_op->Run(scope, place);

  auto in_var = scope.Var("x1");
  auto tensor = in_var->GetMutable<f::LoDTensor>();
  float *expected = tensor->data<float>();
  auto out_var = scope.Var("Out");
  auto target = out_var->GetMutable<f::LoDTensor>();
  // The server computed Out = x0 + x1; x0 and x1 hold identical data, so
  // Out must equal 2 * x1.
  EXPECT_NE(target->memory_size(), size_t(0));
  float *actual = target->data<float>();
  for (int64_t i = 0; i < target->numel(); ++i) {
    EXPECT_EQ(expected[i] * 2, actual[i]);
  }
  listen_and_serv_op->Stop();
  server_thread.join();
  listen_and_serv_op.reset(nullptr);
}

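// Sparse path: the server sums two SelectedRows; the received "Out" must
// match a SelectedRowsAdd of x0 and x1 computed locally.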
TEST(SendRecvOp, CPUSparse) {
  std::thread server_thread(StartServerNet, true);
  sleep(3);  // wait for the server to start
  // local net
  f::Scope scope;
  p::CPUPlace place;
  p::CPUDeviceContext ctx(place);
  InitSelectedRowsInScope(place, &scope);
  scope.Var("RPC_CLIENT_VAR");
  f::AttributeMap attrs;
  selected_port = static_cast<paddle::operators::ListenAndServOp *>(
                      listen_and_serv_op.get())
                      ->GetSelectedPort();
  std::string endpoint = paddle::string::Sprintf("127.0.0.1:%d", selected_port);
  attrs.insert({"endpoints", std::vector<std::string>({endpoint})});
  attrs.insert({"epmap", std::vector<std::string>({endpoint})});
  auto send_op = f::OpRegistry::CreateOp(
      "send", {{"X", {"x1"}}},
      {{"Out", {"Out"}}, {"RPCClient", {"RPC_CLIENT_VAR"}}}, attrs);
  send_op->Run(scope, place);

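  // Rebuild the expected result locally with SelectedRowsAdd and compare it
  // element-wise against the "Out" the server sent back.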
  auto x0 = scope.Var("x0")->GetMutable<f::SelectedRows>();
  auto x1 = scope.Var("x1")->GetMutable<f::SelectedRows>();
  auto out = scope.Var("Out")->GetMutable<f::SelectedRows>();
  auto actual = out->mutable_value();

  std::unique_ptr<f::SelectedRows> expect{new f::SelectedRows()};
  auto expect_value = expect->mutable_value();
  expect_value->mutable_data<float>(f::make_ddim({5, 10}), place);

  m::SelectedRowsAdd<p::CPUDeviceContext, float> add_functor;
  add_functor(ctx, *x0, *x1, expect.get());

  EXPECT_EQ(actual->numel(), expect_value->numel());
  EXPECT_EQ(out->rows().size(), x0->rows().size() + x1->rows().size());

  for (int64_t i = 0; i < expect_value->numel(); ++i) {
    EXPECT_EQ(expect_value->mutable_data<float>(place)[i],
              actual->mutable_data<float>(place)[i]);
  }
  listen_and_serv_op->Stop();
  server_thread.join();
  listen_and_serv_op.reset();
}