// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//
// Created by Jiabin on 2019-08-16.
//

#include "paddle/fluid/framework/op_registry.h"
#include <memory>
#include <string>
#include <vector>
#include "gtest/gtest.h"
#include "paddle/fluid/imperative/execution_context.h"
#include "paddle/fluid/imperative/infer_shape_context.h"
#include "paddle/fluid/imperative/infer_var_type_context.h"
#include "paddle/fluid/imperative/layer.h"

namespace imperative = paddle::imperative;
namespace platform = paddle::platform;
namespace framework = paddle::framework;

namespace paddle {
namespace imperative {

using vb_vector = std::vector<std::shared_ptr<imperative::VarBase>>;

using var_pair = std::pair<std::string, vb_vector>;

TEST(test_layer, test_runtime_context) {
  std::shared_ptr<imperative::VarBase> vin(
      new imperative::VarBase(false, "vin"));
  std::shared_ptr<imperative::VarBase> vout(
      new imperative::VarBase(false, "vout"));
  var_pair in_pair = var_pair("X", vb_vector(1, vin));
  var_pair out_pair = var_pair("Out", vb_vector(1, vout));
  imperative::NameVarBaseMap ins = {in_pair};
  imperative::NameVarBaseMap outs = {out_pair};
  framework::AttributeMap attrs;
  imperative::RuntimeInferVarTypeContext<imperative::VarBase> ctx(
      ins, outs, attrs);
  ASSERT_TRUE(ctx.HasVar("vin"));
  ASSERT_TRUE(ctx.HasInput("X"));
  ASSERT_TRUE(ctx.HasOutput("Out"));

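  // Type/shape/LoD queries are not supported by the dygraph type-inference
  // context and are expected to throw.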
  ASSERT_ANY_THROW(ctx.GetDataTypes("vin"));
  std::vector<framework::proto::VarType::Type> NullType;
  ASSERT_ANY_THROW(ctx.SetDataTypes("vin", NullType));
  ASSERT_ANY_THROW(ctx.GetShape("vin"));
  ASSERT_ANY_THROW(ctx.GetLoDLevel("vin"));
  ASSERT_ANY_THROW(ctx.SetLoDLevel("vin", 2));
}

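// Forward declaration of the debug-string helper implemented in the imperative
// library, declared here so the test can inspect its formatted output.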
std::string LayerDebugString(const std::string &op_type,
                             const NameVarBaseMap &ins,
                             const NameVarBaseMap &outs);

TEST(test_layer, test_debug_string) {
  platform::CPUPlace place;
  std::shared_ptr<imperative::VarBase> vin(
      new imperative::VarBase(false, "vin"));
  var_pair in_pair = var_pair("X", vb_vector(1, vin));

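  // Builds the in/out maps around the given output variable and returns the
  // debug string rendered for a dummy "test_op".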
  auto test_func = [&](std::shared_ptr<imperative::VarBase> &vout) {
    var_pair out_pair = var_pair("Out", vb_vector(1, vout));
    imperative::NameVarBaseMap ins = {in_pair};
    imperative::NameVarBaseMap outs = {out_pair};
    return LayerDebugString("test_op", ins, outs);
  };

  // 1. test null
  std::shared_ptr<imperative::VarBase> null_out(nullptr);
  std::string res_null = test_func(null_out);
  ASSERT_TRUE(res_null.find("NULL") != std::string::npos);

  // 2. test uninit var
  std::shared_ptr<imperative::VarBase> un_init_out(
      new imperative::VarBase(false, "un_init_out"));
  std::string res_un_init = test_func(un_init_out);
  ASSERT_TRUE(res_un_init.find("NOT_INITED_VAR") != std::string::npos);

  // 3. test unresolved type
  std::shared_ptr<imperative::VarBase> ut_out(
      new imperative::VarBase(false, "ut_out"));
  ut_out->MutableVar()->GetMutable<framework::LoDTensorArray>();
  std::string res_ut = test_func(ut_out);
  ASSERT_TRUE(res_ut.find("UNRESOLVED_TYPE") != std::string::npos);

  // 4. test uninit lod tensor
  std::shared_ptr<imperative::VarBase> lod_tensor(
      new imperative::VarBase(false, "lod_tensor"));
  auto tensor_l = lod_tensor->MutableVar()->GetMutable<framework::LoDTensor>();
  std::string res_ui_lod_t = test_func(lod_tensor);
  ASSERT_TRUE(res_ui_lod_t.find("NOT_INITED") != std::string::npos);

  // 5. test init lod tensor
  tensor_l->mutable_data<float>(place);
  std::string res_lod_t = test_func(lod_tensor);
  ASSERT_TRUE(res_lod_t.find("LoDTensor") != std::string::npos);

  // 6. test uninit selected rows
  std::shared_ptr<imperative::VarBase> selected_rows(
      new imperative::VarBase(false, "selected_rows"));
  auto tensor_sr = selected_rows->MutableVar()
                       ->GetMutable<framework::SelectedRows>()
                       ->mutable_value();
  std::string res_ui_sr = test_func(selected_rows);
  ASSERT_TRUE(res_ui_sr.find("NOT_INITED") != std::string::npos);

  // 7. test init selected rows
  tensor_sr->mutable_data<float>(place);
  std::string res_sr = test_func(selected_rows);
  ASSERT_TRUE(res_sr.find("SelectedRows") != std::string::npos);
}

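// Builds a GradOpNode holding a single OpBase wired up with the given inputs,
// outputs, attributes and place.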
static std::shared_ptr<imperative::GradOpNode> CreateGradNode(
    size_t id, const std::string &type, const imperative::NameVarBaseMap &ins,
    const imperative::NameVarBaseMap &outs,
    const framework::AttributeMap &attrs, const platform::Place &place) {
  auto node = std::make_shared<imperative::GradOpNode>();
  auto *op = &(node->emplace_back());
  op->SetId(id);
  op->SetPlace(place);
  op->SetType(type);
  op->SetAttrMap(attrs);
  for (auto &pair : ins) {
    std::vector<std::shared_ptr<VariableWrapper>> vars;
    for (auto &var : pair.second) {
      vars.emplace_back(var->SharedVar());
    }
    op->SetInput(pair.first, vars, false);
  }

  for (auto &pair : outs) {
    std::vector<std::shared_ptr<VariableWrapper>> vars;
    for (auto &var : pair.second) {
      vars.emplace_back(var->SharedVar());
    }
    op->SetOutput(pair.first, vars, false);
  }

  return node;
}

TEST(test_layer, test_clear_backward_info) {
  std::shared_ptr<imperative::VarBase> vin(
      new imperative::VarBase(false, "vin"));
  std::shared_ptr<imperative::VarBase> vout(
      new imperative::VarBase(false, "vout"));
  framework::OpDesc desc;
  platform::CPUPlace place;
  var_pair x_pair = var_pair("X", vb_vector(1, vin));
  var_pair y_pair = var_pair("Y", vb_vector(1, vin));
  var_pair out_pair = var_pair("Out", vb_vector(1, vout));
  imperative::NameVarBaseMap ins = {x_pair, y_pair};
  imperative::NameVarBaseMap outs = {out_pair};
  framework::AttributeMap concat_att_map;
  concat_att_map["axis"] = 1;

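  // Build one grad node plus a pending node linked to it.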
  auto node = CreateGradNode(0, "mul", ins, outs, concat_att_map, place);
  auto pending_node =
      CreateGradNode(0, "mul", ins, outs, concat_att_map, place);
  node->InsertGradPendingNode(pending_node);

  ASSERT_EQ(node->size(), 1UL);
  auto *op = &(node->back());

  ASSERT_GT(op->GetInsMap().size(), 0UL);
  ASSERT_GT(op->GetOutsMap().size(), 0UL);

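  // Clearing the backward trace should drop the op's recorded inputs and
  // outputs.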
  op->ClearBackwardTrace();

  ASSERT_EQ(op->GetInsMap().size(), 0UL);
  ASSERT_EQ(op->GetOutsMap().size(), 0UL);
}

TEST(test_layer, test_varbase_basic) {
  platform::CPUPlace place;
  std::shared_ptr<imperative::VarBase> vin(
      new imperative::VarBase(false, "vin"));
  vin->MutableVar()->GetMutable<framework::LoDTensor>()->mutable_data<float>(
      place);
  std::shared_ptr<imperative::VarBase> vout(vin->NewVarBase(place, false));
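  // NewVarBase copies the variable to the given place; the copy is named after
  // the source with a numeric suffix.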
  ASSERT_EQ(vout->Name(), "vin0");

  std::shared_ptr<imperative::VarBase> vin_with_grad(
      new imperative::VarBase(true, "vin"));
  ASSERT_ANY_THROW(vin->MutableGradVar());
  ASSERT_NO_THROW(ASSERT_TRUE(dynamic_cast<framework::Variable *>(
                                  vin_with_grad->MutableGradVar()) != 0));
  ASSERT_TRUE(dynamic_cast<framework::Variable *>(
                  vin_with_grad->MutableGradVar()) != 0);
  vin_with_grad->SetOverridedStopGradient(false);
  ASSERT_FALSE(vin_with_grad->OverridedStopGradient());
  ASSERT_NO_FATAL_FAILURE(vin_with_grad->SetPersistable(true));
  ASSERT_FALSE(vin_with_grad->OverridedStopGradient());
  ASSERT_NO_FATAL_FAILURE(vin_with_grad->SetName("new_name"));
  ASSERT_EQ(vin_with_grad->Name(), "new_name");
}
// TODO(jiabin): Add more unit tests for layer here

TEST(test_layer, test_dygraph_execution_context) {
  std::shared_ptr<imperative::VarBase> vin(
      new imperative::VarBase(false, "vin"));
  std::shared_ptr<imperative::VarBase> vout(
      new imperative::VarBase(false, "vout"));
  framework::OpDesc desc;
  platform::CPUPlace place;
  var_pair x_pair = var_pair("X", vb_vector(1, vin));
  var_pair y_pair = var_pair("Y", vb_vector(1, vin));
  var_pair out_pair = var_pair("Out", vb_vector(1, vout));
  imperative::NameVarBaseMap ins = {x_pair, y_pair};
  imperative::NameVarBaseMap outs = {out_pair};

  framework::AttributeMap concat_att_map;
  concat_att_map["axis"] = 1;

  auto op = framework::OpRegistry::CreateOp("mul", {}, {}, {}, false);
  paddle::platform::CPUPlace cpu_place;

  paddle::platform::DeviceContextPool &pool =
      paddle::platform::DeviceContextPool::Instance();
  auto *dev_ctx = pool.Get(cpu_place);
  paddle::framework::RuntimeContext ctx({}, {});
  framework::Scope scope;

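  // DygraphExecutionContext adapts the dygraph variable maps and attributes to
  // the framework::ExecutionContext interface queried below.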
  DygraphExecutionContext<imperative::VarBase> dy_exe_context(
      *(op.get()), scope, *dev_ctx, ctx, ins, outs, concat_att_map);

  ASSERT_EQ(dy_exe_context.InputSize("X"), 1u);
  ASSERT_EQ(dy_exe_context.InputName("X"), "vin");
  ASSERT_EQ(dy_exe_context.HasAttr("axis"), true);
  auto attr_map = dy_exe_context.Attrs();
  ASSERT_EQ(boost::get<int>(attr_map["axis"]), 1);
  ASSERT_EQ(dy_exe_context.OutputSize("Out"), 1u);
  ASSERT_EQ(dy_exe_context.HasOutput("Out"), true);
}

TEST(test_layer, test_dygraph_infershape_context) {
  std::shared_ptr<imperative::VarBase> vin(
      new imperative::VarBase(false, "vin"));
  std::shared_ptr<imperative::VarBase> vout(
      new imperative::VarBase(false, "vout"));
  framework::OpDesc desc;
  platform::CPUPlace place;
  var_pair x_pair = var_pair("X", vb_vector(1, vin));
  var_pair y_pair = var_pair("Y", vb_vector(1, vin));
  var_pair out_pair = var_pair("Out", vb_vector(1, vout));
  imperative::NameVarBaseMap ins = {x_pair, y_pair};
  imperative::NameVarBaseMap outs = {out_pair};

  framework::AttributeMap concat_att_map;
  concat_att_map["axis"] = 1;

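  // DygraphInferShapeContext reads inputs/outputs/attrs straight from the
  // dygraph maps, so HasOutputs reflects the keys present in `outs`.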
  DygraphInferShapeContext<imperative::VarBase> infer_shape_ctx(
      &ins, &outs, &concat_att_map);

  bool have_out = infer_shape_ctx.HasOutputs("Out");
  ASSERT_EQ(have_out, true);
  bool have_z = infer_shape_ctx.HasOutputs("Z");
  ASSERT_EQ(have_z, false);
}

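// A default-constructed OpBase has no inner operator; querying its info, inner
// op or attributes should raise EnforceNotMet.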
TEST(test_layer, test_inner_op_not_inited) {
  OpBase op;
  std::string kUnknown = "unknown";
  ASSERT_EQ(op.Type(), kUnknown);
  ASSERT_THROW(op.Info(), platform::EnforceNotMet);
  ASSERT_THROW(op.InnerOp(), platform::EnforceNotMet);
  ASSERT_THROW(op.CheckAttrs(), platform::EnforceNotMet);
}

}  // namespace imperative
}  // namespace paddle

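// CreateOp("mul", ...) above requires the mul operator to be registered.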
USE_OP(mul);