// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//
// Created by Jiabin on 2019-08-16.
//

#include <paddle/fluid/framework/op_registry.h>

#include <memory>
#include <string>
#include <vector>

#include "gtest/gtest.h"
#include "paddle/fluid/imperative/execution_context.h"
#include "paddle/fluid/imperative/infer_shape_context.h"
#include "paddle/fluid/imperative/infer_var_type_context.h"
#include "paddle/fluid/imperative/layer.h"

namespace imperative = paddle::imperative;
namespace platform = paddle::platform;
namespace framework = paddle::framework;

namespace paddle {
namespace imperative {

using vb_vector = std::vector<std::shared_ptr<imperative::VarBase>>;

using var_pair = std::pair<std::string, vb_vector>;

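// Defined elsewhere in the imperative tests; exercised by the test_eager
// case at the end of this file.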
extern void TestSetForwardDataTypeOfGradVarsEager(
    const NameVarMap<egr::EagerVariable>& outs);
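
// Test-only subclass that re-exports the protected members of
// RuntimeInferVarTypeContext so each one can be called directly.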
template <typename VarType>
class TestRuntimeInferVarTypeContext
    : public RuntimeInferVarTypeContext<VarType> {
 public:
  TestRuntimeInferVarTypeContext(
      const NameVarMap<VarType>& inputs, const NameVarMap<VarType>& outputs,
      const framework::AttributeMap& attrs_map,
      const framework::AttributeMap& default_attrs_map)
      : RuntimeInferVarTypeContext<VarType>(inputs, outputs, attrs_map,
                                            default_attrs_map) {}

  bool HasVar(const std::string& name) const {
    return RuntimeInferVarTypeContext<VarType>::HasVar(name);
  }

  const std::vector<std::string>& InputVars(const std::string& name) const {
    return RuntimeInferVarTypeContext<VarType>::InputVars(name);
  }

  const std::vector<std::string>& OutputVars(const std::string& name) const {
    return RuntimeInferVarTypeContext<VarType>::OutputVars(name);
  }

  framework::proto::VarType::Type GetVarType(const std::string& name) const {
    return RuntimeInferVarTypeContext<VarType>::GetVarType(name);
  }

  void SetVarType(const std::string& name,
                  framework::proto::VarType::Type type) {
    RuntimeInferVarTypeContext<VarType>::SetVarType(name, type);
  }

  framework::proto::VarType::Type GetVarDataType(
      const std::string& name) const {
    return RuntimeInferVarTypeContext<VarType>::GetVarDataType(name);
  }

  void SetVarDataType(const std::string& name,
                      framework::proto::VarType::Type type) {
    RuntimeInferVarTypeContext<VarType>::SetVarDataType(name, type);
  }

  std::vector<framework::proto::VarType::Type> GetVarDataTypes(
      const std::string& name) const {
    return RuntimeInferVarTypeContext<VarType>::GetVarDataTypes(name);
  }

  void SetVarDataTypes(
      const std::string& name,
      const std::vector<framework::proto::VarType::Type>& multiple_data_type) {
    RuntimeInferVarTypeContext<VarType>::SetVarDataTypes(name,
                                                         multiple_data_type);
  }

  std::vector<int64_t> GetVarShape(const std::string& name) const {
    return RuntimeInferVarTypeContext<VarType>::GetVarShape(name);
  }

  void SetVarShape(const std::string& name, const std::vector<int64_t>& dims) {
    RuntimeInferVarTypeContext<VarType>::SetVarShape(name, dims);
  }

  int32_t GetVarLoDLevel(const std::string& name) const {
    return RuntimeInferVarTypeContext<VarType>::GetVarLoDLevel(name);
  }

  void SetVarLoDLevel(const std::string& name, int32_t lod_level) {
    RuntimeInferVarTypeContext<VarType>::SetVarLoDLevel(name, lod_level);
  }
};

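// End-to-end check of RuntimeInferVarTypeContext: slot queries, type and
// dtype propagation from inputs to outputs, and the variable-name based
// accessors that must throw in dygraph mode.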
TEST(test_layer, test_runtime_context) {
  std::shared_ptr<imperative::VarBase> vin(
      new imperative::VarBase(false, "vin"));
  std::shared_ptr<imperative::VarBase> vin_b(
      new imperative::VarBase(false, "vin_b"));
  std::shared_ptr<imperative::VarBase> vout(
      new imperative::VarBase(false, "vout"));
  std::shared_ptr<imperative::VarBase> vout_b(
      new imperative::VarBase(false, "vout_b"));
  var_pair in_pair = var_pair("X", {vin, vin_b});
  var_pair out_pair = var_pair("Out", {vout, vout_b});
  imperative::NameVarBaseMap ins = {in_pair};
  imperative::NameVarBaseMap outs = {out_pair};
  framework::AttributeMap attrs;

  // Owned locally so the context is released when the test ends.
  auto ctx = std::make_unique<
      imperative::TestRuntimeInferVarTypeContext<imperative::VarBase>>(
      ins, outs, attrs, framework::AttributeMap());

  ASSERT_TRUE(ctx->HasInput("X"));
  ASSERT_TRUE(ctx->HasOutput("Out"));

  ASSERT_EQ(2u, ctx->InputSize("X"));
  ASSERT_EQ("vin", ctx->InputVarName("X", 0));

  ASSERT_TRUE(ctx->InputTypeAnyOf("X", framework::proto::VarType::LOD_TENSOR));
  ASSERT_TRUE(ctx->InputTypeAllOf("X", framework::proto::VarType::LOD_TENSOR));

  ASSERT_EQ(framework::proto::VarType::LOD_TENSOR, ctx->GetInputType("X"));
  ASSERT_EQ(framework::proto::VarType::FP32, ctx->GetInputDataType("X"));

  ctx->SyncTypeAndDataType("X", "Out");

  // The DataType check is removed because setting the dtype makes no sense
  // in dygraph mode.

  ASSERT_EQ(framework::proto::VarType::LOD_TENSOR, ctx->GetOutputType("Out"));

  ctx->SetOutputType("Out", framework::proto::VarType::SELECTED_ROWS,
                     framework::ALL_ELEMENTS);
  ctx->SetOutputType("Out", framework::proto::VarType::LOD_TENSOR_ARRAY);
  ASSERT_EQ(framework::proto::VarType::LOD_TENSOR_ARRAY, vout->Type());
  ASSERT_EQ(framework::proto::VarType::SELECTED_ROWS, vout_b->Type());

  ctx->SetOutputDataType("Out", framework::proto::VarType::FP64,
                         framework::ALL_ELEMENTS);
  ctx->SetOutputDataType("Out", framework::proto::VarType::INT8);

  // The DataType check is removed because setting the dtype makes no sense
  // in dygraph mode.

  // InsertVar does not throw, but it is a no-op in dygraph mode.
  ASSERT_NO_THROW(
      ctx->InsertVar("vout", framework::proto::VarType::LOD_TENSOR));
  ASSERT_EQ(framework::proto::VarType::LOD_TENSOR_ARRAY, vout->Type());

  ASSERT_ANY_THROW(ctx->HasVar("vin"));
  ASSERT_ANY_THROW(ctx->InputVars("X"));
  ASSERT_ANY_THROW(ctx->OutputVars("Out"));
  ASSERT_ANY_THROW(ctx->GetVarType("vin"));
  ASSERT_ANY_THROW(
      ctx->SetVarType("vin", framework::proto::VarType::LOD_TENSOR));
  ASSERT_ANY_THROW(ctx->GetVarDataType("vin"));
  ASSERT_ANY_THROW(
      ctx->SetVarDataType("vout", framework::proto::VarType::FP32));

  ASSERT_ANY_THROW(ctx->GetVarDataTypes("vin"));
  std::vector<framework::proto::VarType::Type> empty_types;
  ASSERT_ANY_THROW(ctx->SetVarDataTypes("vin", empty_types));
  ASSERT_ANY_THROW(ctx->GetVarShape("vin"));
  ASSERT_ANY_THROW(ctx->SetVarShape("vin", {}));
  ASSERT_ANY_THROW(ctx->GetVarLoDLevel("vin"));
  ASSERT_ANY_THROW(ctx->SetVarLoDLevel("vin", 2));

  ASSERT_TRUE(ctx->IsDygraph());
}

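// Forward declaration; the definition lives with the layer implementation.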
std::string LayerDebugString(const std::string& op_type,
                             const NameVarBaseMap& ins,
                             const NameVarBaseMap& outs);

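// LayerDebugString should label every variable state: null pointers,
// uninitialized vars, unresolved types, and empty vs. initialized
// LoDTensor/SelectedRows values.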
TEST(test_layer, test_debug_string) {
  platform::CPUPlace place;
  std::shared_ptr<imperative::VarBase> vin(
      new imperative::VarBase(false, "vin"));
  var_pair in_pair = var_pair("X", vb_vector(1, vin));

  auto test_func = [&](std::shared_ptr<imperative::VarBase>& vout) {
    var_pair out_pair = var_pair("Out", vb_vector(1, vout));
    imperative::NameVarBaseMap ins = {in_pair};
    imperative::NameVarBaseMap outs = {out_pair};
    return LayerDebugString("test_op", ins, outs);
  };

  // 1. test null
  std::shared_ptr<imperative::VarBase> null_out(nullptr);
  std::string res_null = test_func(null_out);
  ASSERT_TRUE(res_null.find("NULL") != std::string::npos);

  // 2. test uninit var
  std::shared_ptr<imperative::VarBase> un_init_out(
      new imperative::VarBase(false, "un_init_out"));
  std::string res_un_init = test_func(un_init_out);
  ASSERT_TRUE(res_un_init.find("NOT_INITED_VAR") != std::string::npos);

  // 3. test unresolved type
  std::shared_ptr<imperative::VarBase> ut_out(
      new imperative::VarBase(false, "ut_out"));
  ut_out->MutableVar()->GetMutable<framework::LoDTensorArray>();
  std::string res_ut = test_func(ut_out);
  ASSERT_TRUE(res_ut.find("UNRESOLVED_TYPE") != std::string::npos);

  // 4. test uninit lod tensor
  std::shared_ptr<imperative::VarBase> lod_tensor(
      new imperative::VarBase(false, "lod_tensor"));
  auto tensor_l = lod_tensor->MutableVar()->GetMutable<framework::LoDTensor>();
  std::string res_ui_lod_t = test_func(lod_tensor);
  ASSERT_TRUE(res_ui_lod_t.find("NOT_INITED") != std::string::npos);

  // 5. test init lod tensor
  tensor_l->mutable_data<float>(place);
  std::string res_lod_t = test_func(lod_tensor);
  ASSERT_TRUE(res_lod_t.find("LoDTensor") != std::string::npos);

  // 6. test uninit selected rows
  std::shared_ptr<imperative::VarBase> selected_rows(
      new imperative::VarBase(false, "selected_rows"));
  auto tensor_sr = selected_rows->MutableVar()
                       ->GetMutable<phi::SelectedRows>()
                       ->mutable_value();
  std::string res_ui_sr = test_func(selected_rows);
  ASSERT_TRUE(res_ui_sr.find("NOT_INITED") != std::string::npos);

  // 7. test init selected rows
  tensor_sr->mutable_data<float>(place);
  std::string res_sr = test_func(selected_rows);
  ASSERT_TRUE(res_sr.find("SelectedRows") != std::string::npos);
}

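// Helper that builds a GradOpNode holding a single op with the given id,
// type, attributes, and input/output variables.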
static std::shared_ptr<imperative::GradOpNode> CreateGradNode(
    size_t id, const std::string& type, const imperative::NameVarBaseMap& ins,
    const imperative::NameVarBaseMap& outs,
    const framework::AttributeMap& attrs, const platform::Place& place) {
  auto node = std::make_shared<imperative::GradOpNode>();
  auto* op = &(node->emplace_back());
  op->SetId(id);
  op->SetPlace(place);
  op->SetType(type);
  op->SetAttrMap(attrs);
  for (auto& pair : ins) {
    std::vector<std::shared_ptr<VariableWrapper>> vars;
    for (auto& var : pair.second) {
      vars.emplace_back(var->SharedVar());
    }
    op->SetInput(pair.first, vars, false);
  }

  for (auto& pair : outs) {
    std::vector<std::shared_ptr<VariableWrapper>> vars;
    for (auto& var : pair.second) {
      vars.emplace_back(var->SharedVar());
    }
    op->SetOutput(pair.first, vars, false);
  }

  return node;
}

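// ClearBackwardTrace() must drop the recorded input and output maps of a
// grad op.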
TEST(test_layer, test_clear_backward_info) {
  std::shared_ptr<imperative::VarBase> vin(
      new imperative::VarBase(false, "vin"));
  std::shared_ptr<imperative::VarBase> vout(
      new imperative::VarBase(false, "vout"));
  framework::OpDesc desc;
  platform::CPUPlace place;
  var_pair x_pair = var_pair("X", vb_vector(1, vin));
  var_pair y_pair = var_pair("Y", vb_vector(1, vin));
  var_pair out_pair = var_pair("Out", vb_vector(1, vout));
  imperative::NameVarBaseMap ins = {x_pair, y_pair};
  imperative::NameVarBaseMap outs = {out_pair};
  framework::AttributeMap concat_att_map;
  concat_att_map["axis"] = 1;

  auto node = CreateGradNode(0, "mul", ins, outs, concat_att_map, place);
  auto pending_node =
      CreateGradNode(0, "mul", ins, outs, concat_att_map, place);
  node->InsertGradPendingNode(pending_node);

  ASSERT_EQ(node->size(), 1UL);
  auto* op = &(node->back());

  ASSERT_GT(op->GetInsMap().size(), 0UL);
  ASSERT_GT(op->GetOutsMap().size(), 0UL);

  op->ClearBackwardTrace();

  ASSERT_EQ(op->GetInsMap().size(), 0UL);
  ASSERT_EQ(op->GetOutsMap().size(), 0UL);
}

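// Basic VarBase behavior: copying to a place, gradient variable access,
// stop-gradient / persistable flags, and renaming.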
TEST(test_layer, test_varbase_basic) {
  platform::CPUPlace place;
  std::shared_ptr<imperative::VarBase> vin(
      new imperative::VarBase(false, "vin"));
  vin->MutableVar()->GetMutable<framework::LoDTensor>()->mutable_data<float>(
      place);
  std::shared_ptr<imperative::VarBase> vout(vin->NewVarBase(place, false));
  ASSERT_EQ(vout->Name(), "vin0");

  std::shared_ptr<imperative::VarBase> vin_with_grad(
      new imperative::VarBase(true, "vin"));
  ASSERT_ANY_THROW(vin->MutableGradVar());
  ASSERT_NO_THROW(ASSERT_TRUE(dynamic_cast<framework::Variable*>(
                                  vin_with_grad->MutableGradVar()) != 0));
  ASSERT_TRUE(
      dynamic_cast<framework::Variable*>(vin_with_grad->MutableGradVar()) != 0);
  vin_with_grad->SetOverridedStopGradient(false);
  ASSERT_FALSE(vin_with_grad->OverridedStopGradient());
  ASSERT_NO_FATAL_FAILURE(vin_with_grad->SetPersistable(true));
  ASSERT_FALSE(vin_with_grad->OverridedStopGradient());
  ASSERT_NO_FATAL_FAILURE(vin_with_grad->SetName("new_name"));
  ASSERT_EQ(vin_with_grad->Name(), "new_name");
}
// TODO(jiabin): Add more unit tests for layer here.

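// DygraphExecutionContext should expose a dygraph op's inputs, outputs, and
// attributes through the framework::ExecutionContext interface.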
TEST(test_layer, test_dygraph_execution_context) {
  std::shared_ptr<imperative::VarBase> vin(
      new imperative::VarBase(false, "vin"));
  std::shared_ptr<imperative::VarBase> vout(
      new imperative::VarBase(false, "vout"));
  framework::OpDesc desc;
  platform::CPUPlace place;
  var_pair x_pair = var_pair("X", vb_vector(1, vin));
  var_pair y_pair = var_pair("Y", vb_vector(1, vin));
  var_pair out_pair = var_pair("Out", vb_vector(1, vout));
  imperative::NameVarBaseMap ins = {x_pair, y_pair};
  imperative::NameVarBaseMap outs = {out_pair};

  framework::AttributeMap concat_att_map;
  concat_att_map["axis"] = 1;

  auto op = framework::OpRegistry::CreateOp("mul", {}, {}, {}, false);
  paddle::platform::CPUPlace cpu_place;

  paddle::platform::DeviceContextPool& pool =
      paddle::platform::DeviceContextPool::Instance();
  auto* dev_ctx = pool.Get(cpu_place);
  paddle::framework::RuntimeContext ctx({}, {});
  framework::Scope scope;

  DygraphExecutionContext<imperative::VarBase> dy_exe_context(
      *(op.get()), scope, *dev_ctx, ctx, ins, outs, concat_att_map, {});

  ASSERT_EQ(dy_exe_context.InputSize("X"), 1u);
  ASSERT_EQ(dy_exe_context.InputName("X"), "vin");
  ASSERT_EQ(dy_exe_context.HasAttr("axis"), true);
  auto attr_map = dy_exe_context.Attrs();
  ASSERT_EQ(BOOST_GET(int, attr_map["axis"]), 1);
  ASSERT_EQ(dy_exe_context.OutputSize("Out"), 1u);
  ASSERT_EQ(dy_exe_context.HasOutput("Out"), true);
}

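// DygraphInferShapeContext should report which outputs are present.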
TEST(test_layer, test_dygraph_infershape_context) {
  std::shared_ptr<imperative::VarBase> vin(
      new imperative::VarBase(false, "vin"));
  std::shared_ptr<imperative::VarBase> vout(
      new imperative::VarBase(false, "vout"));
  framework::OpDesc desc;
  platform::CPUPlace place;
  var_pair x_pair = var_pair("X", vb_vector(1, vin));
  var_pair y_pair = var_pair("Y", vb_vector(1, vin));
  var_pair out_pair = var_pair("Out", vb_vector(1, vout));
  imperative::NameVarBaseMap ins = {x_pair, y_pair};
  imperative::NameVarBaseMap outs = {out_pair};

  framework::AttributeMap concat_att_map;
  concat_att_map["axis"] = 1;

  DygraphInferShapeContext<imperative::VarBase> infer_shape_ctx(
      &ins, &outs, &concat_att_map, {}, "dummy");

  bool have_out = infer_shape_ctx.HasOutputs("Out");
  ASSERT_EQ(have_out, true);
  bool have_z = infer_shape_ctx.HasOutputs("Z");
  ASSERT_EQ(have_z, false);
}

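// A default-constructed OpBase has no inner op: its type is "unknown" and
// accessors that need a real op must throw.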
TEST(test_layer, test_inner_op_not_inited) {
  OpBase op;
  std::string kUnknown = "unknown";
  ASSERT_EQ(op.Type(), kUnknown);
  ASSERT_THROW(op.Info(), platform::EnforceNotMet);
  ASSERT_THROW(op.InnerOp(), platform::EnforceNotMet);
  ASSERT_THROW(op.CheckAttrs(), platform::EnforceNotMet);
}

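// Smoke test: the eager-mode grad-var dtype helper accepts an empty input
// map.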
TEST(test_layer, test_eager) {
  imperative::NameTensorMap ins = {};
  TestSetForwardDataTypeOfGradVarsEager(ins);
}

}  // namespace imperative
}  // namespace paddle

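// Pull in the registration of the real mul operator so that
// OpRegistry::CreateOp("mul", ...) above can resolve it.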
USE_OP_ITSELF(mul);