/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <unistd.h>
#include <string>
#include <thread>  // NOLINT

#include "gtest/gtest.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/operators/listen_and_serv_op.h"
#include "paddle/fluid/operators/math/math_function.h"
#include "paddle/fluid/operators/math/selected_rows_functor.h"
#include "paddle/fluid/string/printf.h"

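// Pull in the registrations of the operators this test exercises.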
USE_NO_KERNEL_OP(send);
USE_NO_KERNEL_OP(listen_and_serv);
USE_OP(sum);

namespace f = paddle::framework;
namespace p = paddle::platform;
namespace m = paddle::operators::math;

// Globals shared between the server thread and the tests, for simplicity.
std::unique_ptr<f::OperatorBase> listen_and_serv_op;
int selected_port;

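// Create two 10x10 dense inputs x0 and x1, each holding the values 0..99,
// plus an allocated-but-uninitialized output tensor "Out".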
void InitTensorsInScope(const p::CPUPlace &place, f::Scope *scope) {
  p::CPUDeviceContext ctx(place);
  for (int i = 0; i < 2; ++i) {
    auto var_name = paddle::string::Sprintf("x%d", i);
    auto var = scope->Var(var_name);
    auto tensor = var->GetMutable<f::LoDTensor>();
    tensor->Resize({10, 10});
    float *expect = tensor->mutable_data<float>(place);
    for (int64_t i = 0; i < tensor->numel(); ++i) {
      expect[i] = static_cast<float>(i);
    }
  }

  auto out_var = scope->Var("Out");
  auto out_tensor = out_var->GetMutable<f::LoDTensor>();
  out_tensor->Resize({10, 10});
  out_tensor->mutable_data<float>(place);  // allocate
}

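// Create two sparse (SelectedRows) inputs x0 (rows 0, 4, 7) and x1 (rows 2, 9)
// filled with ones, plus an output "Out" sized to hold their combined rows.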
void InitSelectedRowsInScope(const p::CPUPlace &place, f::Scope *scope) {
  p::CPUDeviceContext ctx(place);
  int64_t height = 10;
  int64_t row_numel = 10;
  m::SetConstant<p::CPUDeviceContext, float> set_one;
  // init x0
  std::vector<int64_t> rows0{0, 4, 7};
  auto x0_var = scope->Var("x0");
  auto x0 = x0_var->GetMutable<f::SelectedRows>();
  x0->set_rows(rows0);
  x0->set_height(height);
  auto x0_value = x0->mutable_value();
  x0_value->mutable_data<float>(
      f::make_ddim({static_cast<int64_t>(rows0.size()), row_numel}), place);
  set_one(ctx, x0_value, 1.0);

  // init x1
  std::vector<int64_t> rows1{2, 9};
  auto x1_var = scope->Var("x1");
  auto x1 = x1_var->GetMutable<f::SelectedRows>();
  x1->set_rows(rows1);
  x1->set_height(height);
  auto x1_value = x1->mutable_value();
  x1_value->mutable_data<float>(
      f::make_ddim({static_cast<int64_t>(rows1.size()), row_numel}), place);
  set_one(ctx, x1_value, 1.0);

  auto out_var = scope->Var("Out");
  auto out = out_var->GetMutable<f::SelectedRows>();
  auto out_value = out->mutable_value();
  out->set_height(height);
  out_value->mutable_data<float>(f::make_ddim({5, 10}), place);
}

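// Append an operator of `type` to `block`, declaring its output variables in
// the block first so the server-side program knows their type and persistence.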
void AddOp(const std::string &type, const f::VariableNameMap &inputs,
           const f::VariableNameMap &outputs, f::AttributeMap attrs,
           f::BlockDesc *block, bool is_sparse) {
  // insert output
  for (auto kv : outputs) {
    for (auto v : kv.second) {
      auto var = block->Var(v);
      var->SetDataType(f::proto::VarType::FP32);
      var->SetPersistable(true);
      if (is_sparse) {
        var->SetType(f::proto::VarType::SELECTED_ROWS);
      }
    }
  }

  // insert op
  auto op = block->AppendOp();
  op->SetType(type);
  for (auto &kv : inputs) {
    op->SetInput(kv.first, kv.second);
  }
  for (auto &kv : outputs) {
    op->SetOutput(kv.first, kv.second);
  }
  op->SetAttrMap(attrs);
}

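// Run a listen_and_serv op in the current thread: bind to an OS-chosen port
// and serve the "sum" sub-program until Stop() is called.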
void StartServerNet(bool is_sparse, std::atomic<bool> *initialized) {
  f::Scope scope;
  p::CPUPlace place;
  VLOG(4) << "before init tensor";
  if (is_sparse) {
    InitSelectedRowsInScope(place, &scope);
  } else {
    InitTensorsInScope(place, &scope);
  }
  // Sub-program run inside listen_and_serv_op; for this simple test we use sum.
  f::ProgramDesc program;
  const auto &root_block = program.Block(0);
  auto *optimize_block = program.AppendBlock(root_block);
  auto *prefetch_block = program.AppendBlock(root_block);
  // X holds server-side tensors, RX the received tensors; they must share a
  // shape.
  AddOp("sum", {{"X", {"x0", "x1"}}}, {{"Out", {"Out"}}}, {}, optimize_block,
        is_sparse);
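  // Attributes consumed by listen_and_serv: the bind address, the
  // parameter/gradient variable lists, and the blocks to execute per update.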
  f::AttributeMap attrs;
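  // Port 0 asks the OS for a free port; the tests read the real port back
  // through GetSelectedPort().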
  attrs.insert({"endpoint", std::string("127.0.0.1:0")});
  attrs.insert({"Fanin", 1});
  attrs.insert({"ParamList", std::vector<std::string>({"Out"})});
  attrs.insert({"GradList", std::vector<std::string>({"x1"})});
  attrs.insert({"OptimizeBlock", optimize_block});
  attrs.insert({"PrefetchBlock", prefetch_block});
  attrs.insert({"grad_to_block_id", std::vector<std::string>({""})});
  attrs.insert({"sync_mode", true});
  VLOG(4) << "before init op";
  listen_and_serv_op =
      f::OpRegistry::CreateOp("listen_and_serv", {{"X", {"x1"}}}, {}, attrs);
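  // Tell the test thread the op object exists; the server itself is not up
  // yet, so callers must still WaitServerReady().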
  *initialized = true;
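  // Blocks here serving requests until the test thread calls Stop().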
  listen_and_serv_op->Run(scope, place);
  LOG(INFO) << "server exit";
}

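// Dense round trip: ship x1 to the server, let it run sum(x0, x1), and verify
// the fetched Out equals 2 * x1 (both inputs hold identical data).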
TEST(SendRecvOp, CPUDense) {
  std::atomic<bool> initialized{false};
  std::thread server_thread(StartServerNet, false, &initialized);
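  // Spin until the server thread has constructed listen_and_serv_op.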
  while (!initialized) {
  }
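  // Block until the server is actually accepting connections.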
  static_cast<paddle::operators::ListenAndServOp *>(listen_and_serv_op.get())
      ->WaitServerReady();

  // local net
  f::Scope scope;
  p::CPUPlace place;
  InitTensorsInScope(place, &scope);
  // create rpc client var
  scope.Var("RPC_CLIENT_VAR");

  f::AttributeMap attrs;
  auto *listen_and_serv_op_ptr =
      static_cast<paddle::operators::ListenAndServOp *>(
          listen_and_serv_op.get());
  ASSERT_TRUE(listen_and_serv_op_ptr != nullptr);
  selected_port = listen_and_serv_op_ptr->GetSelectedPort();
  std::string endpoint = paddle::string::Sprintf("127.0.0.1:%d", selected_port);
  attrs.insert({"endpoints", std::vector<std::string>({endpoint})});
  attrs.insert({"epmap", std::vector<std::string>({endpoint})});
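  // The send op ships x1 to the server, triggers the server-side sum block,
  // and pulls the result back into the local "Out".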
  auto send_op = f::OpRegistry::CreateOp("send", {{"X", {"x1"}}},
                                         {{"Out", {"Out"}}}, attrs);
  send_op->Run(scope, place);

  auto in_var = scope.Var("x1");
  auto tensor = in_var->GetMutable<f::LoDTensor>();
  float *expected = tensor->data<float>();
  auto out_var = scope.Var("Out");
  auto target = out_var->GetMutable<f::LoDTensor>();
  // Out = x0 + x1 = 2 * x1, since both inputs hold identical data.
  EXPECT_NE(target->memory_size(), size_t(0));
  float *actual = target->data<float>();
  for (int64_t i = 0; i < target->numel(); ++i) {
    EXPECT_EQ(expected[i] * 2, actual[i]);
  }
  listen_and_serv_op->Stop();
  server_thread.join();
  listen_and_serv_op.reset(nullptr);
  paddle::operators::ListenAndServOp::ResetPort();
}

TEST(SendRecvOp, CPUSparse) {
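  // Sparse round trip: same flow as CPUDense but with SelectedRows; the
  // fetched Out must match a locally computed x0 + x1.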
  std::atomic<bool> initialized{false};
  std::thread server_thread(StartServerNet, true, &initialized);
  while (!initialized) {
  }
  auto *listen_and_serv_op_ptr =
      static_cast<paddle::operators::ListenAndServOp *>(
          listen_and_serv_op.get());
  ASSERT_TRUE(listen_and_serv_op_ptr != nullptr);
  listen_and_serv_op_ptr->WaitServerReady();

  // local net
  f::Scope scope;
  p::CPUPlace place;
  p::CPUDeviceContext ctx(place);
  InitSelectedRowsInScope(place, &scope);
  scope.Var("RPC_CLIENT_VAR");
  f::AttributeMap attrs;
  selected_port = listen_and_serv_op_ptr->GetSelectedPort();
  std::string endpoint = paddle::string::Sprintf("127.0.0.1:%d", selected_port);
  attrs.insert({"endpoints", std::vector<std::string>({endpoint})});
  attrs.insert({"epmap", std::vector<std::string>({endpoint})});
  auto send_op = f::OpRegistry::CreateOp("send", {{"X", {"x1"}}},
                                         {{"Out", {"Out"}}}, attrs);
  send_op->Run(scope, place);

  auto x0 = scope.Var("x0")->GetMutable<f::SelectedRows>();
  auto x1 = scope.Var("x1")->GetMutable<f::SelectedRows>();
  auto out = scope.Var("Out")->GetMutable<f::SelectedRows>();
  auto actual = out->mutable_value();

  std::unique_ptr<f::SelectedRows> expect{new f::SelectedRows()};
  auto expect_value = expect->mutable_value();
  expect_value->mutable_data<float>(f::make_ddim({5, 10}), place);

  m::SelectedRowsAdd<p::CPUDeviceContext, float> add_functor;
  add_functor(ctx, *x0, *x1, expect.get());

  EXPECT_EQ(actual->numel(), expect_value->numel());
  EXPECT_EQ(out->rows().size(), x0->rows().size() + x1->rows().size());

  for (int64_t i = 0; i < expect_value->numel(); ++i) {
    EXPECT_EQ(expect_value->mutable_data<float>(place)[i],
              actual->mutable_data<float>(place)[i]);
  }
  listen_and_serv_op->Stop();
  server_thread.join();
  listen_and_serv_op.reset();
  paddle::operators::ListenAndServOp::ResetPort();
}