// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <algorithm>
#include <atomic>
#include <cstdint>
#include <functional>     // NOLINT
#include <list>
#include <map>            // NOLINT
#include <memory>         // NOLINT
#include <mutex>          // NOLINT
#include <set>
#include <string>         // NOLINT
#include <unordered_map>  // NOLINT
#include <utility>
#include <vector>

#include "paddle/fluid/framework/op_desc.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/var_type_inference.h"
#include "paddle/fluid/framework/variable.h"
#include "paddle/fluid/imperative/flags.h"
#include "paddle/fluid/imperative/type_defs.h"
#include "paddle/fluid/platform/enforce.h"
#include "paddle/fluid/platform/macros.h"

namespace paddle {
namespace imperative {

class OpBase;

// A mutex-guarded set of names, safe to mutate from multiple threads.
// Backed by a multiset so the same name may be inserted once per live
// instance and removed one occurrence at a time.
class ThreadSafeNameSet {
 public:
  // Inserts one occurrence of `name`.
  void Insert(const std::string& name);

  // Removes one occurrence of `name`.
  void Remove(const std::string& name);

  // Returns the names currently stored.
  std::vector<std::string> Names() const;

 private:
  std::multiset<std::string> set_;
  mutable std::mutex mtx_;  // guards set_; mutable so Names() can stay const
};

class VarBase {
J
Jiabin Yang 已提交
56 57
  DISABLE_COPY_AND_ASSIGN(VarBase);

58
 public:
Z
Zeng Jinle 已提交
59
  static std::vector<std::string> AliveVarNames();
J
Jiabin Yang 已提交
60
  explicit VarBase(bool has_grad, const std::string& name)
61
      : name_(name),
J
Jiabin Yang 已提交
62
        grad_var_(has_grad ? new VarBase(false, GradVarName()) : nullptr) {
Z
Zeng Jinle 已提交
63
    if (IsDebugEnabled()) {
J
Jiabin Yang 已提交
64
      VLOG(10) << "Construct VarBase: " << name;
Z
Zeng Jinle 已提交
65 66
      name_set_.Insert(name_);
    }
67
  }
68

J
Jiabin Yang 已提交
69 70 71 72
  explicit VarBase(const std::string& name) : VarBase(true, name) {}

  ~VarBase() {
    VLOG(10) << "Destruct VarBase: " << name_;
Z
Zeng Jinle 已提交
73 74 75
    if (IsDebugEnabled()) {
      name_set_.Remove(name_);
    }
M
minqiyang 已提交
76
  }
77

J
Jiabin Yang 已提交
78
  const framework::Variable& Var() const { return var_; }
79

J
Jiabin Yang 已提交
80
  framework::Variable* MutableVar() { return &var_; }
M
minqiyang 已提交
81

J
Jiabin Yang 已提交
82 83 84 85 86 87 88
  bool HasGradVar() const { return grad_var_ != nullptr; }

  const std::shared_ptr<VarBase>& GradVarBase() const { return grad_var_; }

  const framework::Variable& GradVar() const {
    PADDLE_ENFORCE_NOT_NULL(grad_var_, "Gradient of %s does not exist", name_);
    return grad_var_->var_;
M
minqiyang 已提交
89
  }
M
minqiyang 已提交
90

J
Jiabin Yang 已提交
91 92 93 94
  framework::Variable* MutableGradVar() {
    PADDLE_ENFORCE_NOT_NULL(grad_var_, "Gradient of %s does not exist", name_);
    return &(grad_var_->var_);
  }
X
Xin Pan 已提交
95

J
Jiabin Yang 已提交
96
  void SetStopGradient(bool stop_gradient) {
M
minqiyang 已提交
97
    stop_gradient_ = stop_gradient;
J
Jiabin Yang 已提交
98 99
    if (grad_var_) {
      grad_var_->stop_gradient_ = stop_gradient;
100
    }
M
minqiyang 已提交
101
  }
X
Xin Pan 已提交
102

J
Jiabin Yang 已提交
103
  bool StopGradient() const { return stop_gradient_; }
104

J
Jiabin Yang 已提交
105
  void SetPersistable(bool persistable) { persistable_ = persistable; }
106

J
Jiabin Yang 已提交
107
  bool Persistable() const { return persistable_; }
108

J
Jiabin Yang 已提交
109
  void AddGradOps(const std::weak_ptr<OpBase>& op);
X
Xin Pan 已提交
110

J
Jiabin Yang 已提交
111 112 113 114 115 116
  std::vector<OpBase*> GradOps() {
    std::vector<OpBase*> rlt;
    // TODO(jiabin): use better data structure to remove nullptr when we find it
    for (const auto& wk_ptr : grad_ops_) {
      OpBase* tmp_op = wk_ptr.lock().get();
      if (tmp_op) rlt.emplace_back(tmp_op);
M
minqiyang 已提交
117
    }
J
Jiabin Yang 已提交
118
    return rlt;
X
Xin Pan 已提交
119
  }
J
Jiabin Yang 已提交
120
  void ClearGradOps() { grad_ops_.clear(); }
X
Xin Pan 已提交
121

J
Jiabin Yang 已提交
122
  const std::string& Name() const { return name_; }
M
minqiyang 已提交
123

J
Jiabin Yang 已提交
124 125 126 127 128
  void SetName(const std::string& name) {
    name_ = name;
    if (grad_var_) {
      grad_var_->SetName(GradVarName());
    }
M
minqiyang 已提交
129 130
  }

J
Jiabin Yang 已提交
131
  std::string GradVarName() { return framework::GradVarName(name_); }
132

J
Jiabin Yang 已提交
133
  void SetType(framework::proto::VarType::Type type) { type_ = type; }
134

J
Jiabin Yang 已提交
135
  framework::proto::VarType::Type Type() const { return type_; }
136

J
Jiabin Yang 已提交
137 138 139 140
  void SetDataType(framework::proto::VarType::Type data_type) {
    data_type_ = data_type;
    if (grad_var_) {
      grad_var_->SetDataType(data_type_);
141 142 143
    }
  }

J
Jiabin Yang 已提交
144
  framework::proto::VarType::Type DataType() const { return data_type_; }
X
polish  
Xin Pan 已提交
145

J
Jiabin Yang 已提交
146
  void ClearGradient();
X
Xin Pan 已提交
147

J
Jiabin Yang 已提交
148 149
  std::shared_ptr<VarBase> NewVarBase(const platform::Place& dst_place,
                                      const bool blocking) const;
M
minqiyang 已提交
150

J
Jiabin Yang 已提交
151 152 153 154 155
 private:
  framework::Variable var_;
  std::string name_;
  std::shared_ptr<VarBase> grad_var_;
  mutable size_t copied_counter_ = 0;
156

J
Jiabin Yang 已提交
157 158
  // grad_op indicates which grad_op will this var be used as input
  std::vector<std::weak_ptr<OpBase>> grad_ops_;
X
polish  
Xin Pan 已提交
159

J
Jiabin Yang 已提交
160 161
  bool stop_gradient_{false};
  bool persistable_{false};
M
minqiyang 已提交
162

J
Jiabin Yang 已提交
163 164 165
  framework::proto::VarType::Type type_{framework::proto::VarType::LOD_TENSOR};
  framework::proto::VarType::Type data_type_{framework::proto::VarType::FP32};
  static ThreadSafeNameSet name_set_;
166 167 168 169 170 171
};

class Layer {
 public:
  virtual ~Layer() {}

172 173
  virtual std::vector<std::shared_ptr<VarBase>> Forward(
      const std::vector<std::shared_ptr<VarBase>>& inputs) {
J
Jiabin Yang 已提交
174
    return {};
175
  }
X
Xin Pan 已提交
176
};

// infer var type context for imperative mode
class RuntimeInferVarTypeContext : public framework::InferVarTypeContext {
M
minqiyang 已提交
180
 public:
J
Jiabin Yang 已提交
181 182 183
  RuntimeInferVarTypeContext(const NameVarBaseMap& inputs,
                             const NameVarBaseMap* outputs,
                             const framework::AttributeMap& attrs_map)
M
minqiyang 已提交
184 185 186 187 188 189 190
      : InferVarTypeContext(nullptr, nullptr),
        inputs_(inputs),
        outputs_(outputs),
        attrs_(attrs_map),
        input_names_(),
        output_names_(),
        var_set_() {
J
Jiabin Yang 已提交
191 192 193
    input_names_.reserve(inputs_.size());
    for (auto& it : inputs_) {
      for (auto& var : it.second) {
M
minqiyang 已提交
194
        input_names_[it.first].emplace_back(var->Name());
J
Jiabin Yang 已提交
195
        var_set_[var->Name()] = var.get();
M
minqiyang 已提交
196 197 198 199 200
      }
    }

    output_names_.reserve(outputs_->size());
    for (auto& it : *outputs_) {
J
Jiabin Yang 已提交
201
      for (auto& var : it.second) {
M
minqiyang 已提交
202
        output_names_[it.first].emplace_back(var->Name());
J
Jiabin Yang 已提交
203
        var_set_[var->Name()] = var.get();
M
minqiyang 已提交
204 205 206 207
      }
    }
  }

M
minqiyang 已提交
208 209 210
  virtual ~RuntimeInferVarTypeContext() {}

  framework::Attribute GetAttr(const std::string& name) const override {
J
Jiabin Yang 已提交
211 212 213 214
    auto iter = attrs_.find(name);
    PADDLE_ENFORCE_EQ(iter != attrs_.end(), true, "Cannot find attribute %s",
                      name);
    return iter->second;
M
minqiyang 已提交
215 216
  }

M
minqiyang 已提交
217
  bool HasVar(const std::string& name) const override {
M
minqiyang 已提交
218 219 220
    return var_set_.count(name) > 0;
  }

M
minqiyang 已提交
221
  bool HasInput(const std::string& name) const override {
J
Jiabin Yang 已提交
222
    return inputs_.count(name) > 0;
M
minqiyang 已提交
223 224
  }

M
minqiyang 已提交
225
  bool HasOutput(const std::string& name) const override {
M
minqiyang 已提交
226 227 228 229
    PADDLE_ENFORCE_NOT_NULL(outputs_);
    return outputs_->count(name) > 0;
  }

M
minqiyang 已提交
230 231
  const std::vector<std::string>& Input(
      const std::string& name) const override {
J
Jiabin Yang 已提交
232 233 234 235
    auto iter = input_names_.find(name);
    PADDLE_ENFORCE_EQ(iter != input_names_.end(), true, "Cannot find input %s",
                      name);
    return iter->second;
M
minqiyang 已提交
236 237
  }

M
minqiyang 已提交
238 239
  const std::vector<std::string>& Output(
      const std::string& name) const override {
J
Jiabin Yang 已提交
240 241 242 243
    auto iter = output_names_.find(name);
    PADDLE_ENFORCE_EQ(iter != output_names_.end(), true,
                      "Cannot find output %s", name);
    return iter->second;
M
minqiyang 已提交
244 245
  }

M
minqiyang 已提交
246 247
  framework::proto::VarType::Type GetType(
      const std::string& name) const override {
J
Jiabin Yang 已提交
248 249 250 251
    auto iter = var_set_.find(name);
    PADDLE_ENFORCE_EQ(iter != var_set_.end(), true,
                      "Cannot find var %s in GetType", name);
    return iter->second->Type();
M
minqiyang 已提交
252 253
  }

M
minqiyang 已提交
254 255
  void SetType(const std::string& name,
               framework::proto::VarType::Type type) override {
256 257 258 259 260
    if (name == "kLookupTablePath") {
      VLOG(2) << "SUPER UGLY FIX, remove this when move imperative mode in C++";
    } else {
      var_set_[name]->SetType(type);
    }
M
minqiyang 已提交
261 262
  }

M
minqiyang 已提交
263 264
  framework::proto::VarType::Type GetDataType(
      const std::string& name) const override {
J
Jiabin Yang 已提交
265 266 267 268
    auto iter = var_set_.find(name);
    PADDLE_ENFORCE_EQ(iter != var_set_.end(), true,
                      "Cannot find var %s in GetDataType", name);
    return iter->second->DataType();
M
minqiyang 已提交
269 270
  }

M
minqiyang 已提交
271 272
  void SetDataType(const std::string& name,
                   framework::proto::VarType::Type type) override {
M
minqiyang 已提交
273
    var_set_[name]->SetDataType(type);
M
minqiyang 已提交
274 275
  }

M
minqiyang 已提交
276 277
  std::vector<framework::proto::VarType::Type> GetDataTypes(
      const std::string& name) const override {
M
minqiyang 已提交
278 279 280
    PADDLE_THROW("GetDataTypes is not supported in runtime InferVarType");
  }

M
minqiyang 已提交
281 282 283
  void SetDataTypes(const std::string& name,
                    const std::vector<framework::proto::VarType::Type>&
                        multiple_data_type) override {
M
minqiyang 已提交
284 285 286
    PADDLE_THROW("SetDataTypes is not supported in runtime InferVarType");
  }

M
minqiyang 已提交
287
  std::vector<int64_t> GetShape(const std::string& name) const override {
M
minqiyang 已提交
288 289 290
    PADDLE_THROW("Do not handle Shape in runtime InferVarType");
  }

M
minqiyang 已提交
291 292
  void SetShape(const std::string& name,
                const std::vector<int64_t>& dims) override {
M
minqiyang 已提交
293 294 295
    PADDLE_THROW("Do not handle Shape in runtime InferVarType");
  }

M
minqiyang 已提交
296
  int32_t GetLoDLevel(const std::string& name) const override {
M
minqiyang 已提交
297 298 299
    PADDLE_THROW("Do not handle LoDLevel in runtime InferVarType");
  }

M
minqiyang 已提交
300
  void SetLoDLevel(const std::string& name, int32_t lod_level) override {
M
minqiyang 已提交
301 302 303 304
    PADDLE_THROW("Do not handle LoDLevel in runtime InferVarType");
  }

 private:
J
Jiabin Yang 已提交
305 306 307
  const NameVarBaseMap& inputs_;
  const NameVarBaseMap* outputs_;
  const framework::AttributeMap& attrs_;
M
minqiyang 已提交
308 309
  std::unordered_map<std::string, std::vector<std::string>> input_names_;
  std::unordered_map<std::string, std::vector<std::string>> output_names_;
J
Jiabin Yang 已提交
310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395
  std::unordered_map<std::string, VarBase*> var_set_;
};

// TODO(zjl): to support py_func layer
// An operator recorded by the imperative tracer. Wraps a
// framework::OperatorBase together with the input/output variable maps
// needed to run its backward pass. Always heap-allocated via Create() so
// shared_from_this() is valid.
class OpBase : public std::enable_shared_from_this<OpBase> {
  DISABLE_COPY_AND_ASSIGN(OpBase);

 public:
  ~OpBase() { VLOG(3) << "Destruct Op: " << Type() << std::endl; }

  // Developer should not rely on this method to create OpBase.
  // OpBase should be created in Tracer and managed by Tracer totally.
  template <typename... Args>
  static std::shared_ptr<OpBase> Create(Args&&... args) {
    return std::shared_ptr<OpBase>(new OpBase(std::forward<Args>(args)...));
  }

  // Id assigned at construction; used to order grad ops (see
  // SortGradPendingOps).
  size_t id() const { return id_; }

  const std::string& Type() const { return op_->Type(); }

  // Executes the wrapped operator on the given inputs/outputs.
  void Run(const NameVarBaseMap& ins, const NameVarBaseMap& outs);

  const framework::VariableNameMap& InputNameMap() const {
    return op_->Inputs();
  }

  const framework::VariableNameMap& OutputNameMap() const {
    return op_->Outputs();
  }

  const framework::AttributeMap& Attrs() const { return op_->Attrs(); }
  const framework::OpInfo& Info() const { return op_->Info(); }

  void ClearBackwardTrace();

  const std::vector<OpBase*>& GradPendingOps() const {
    return grad_pending_ops_;
  }

  void InsertGradPendingOps(OpBase* op) { grad_pending_ops_.emplace_back(op); }

  // Sorts pending grad ops by descending id so later-created ops are
  // visited first during backward.
  void SortGradPendingOps() {
    std::sort(grad_pending_ops_.begin(), grad_pending_ops_.end(),
              [](OpBase* op1, OpBase* op2) { return op1->id() > op2->id(); });
  }
  NameVarBaseMap* GetMutableOutsMap() { return &outs_; }
  NameVarBaseMap* GetMutableInsMap() { return &ins_; }
  const NameVarBaseMap& GetInsMap() const { return ins_; }
  const NameVarBaseMap& GetOutsMap() const { return outs_; }
  const platform::Place& place() const { return place_; }

  // TODO(jiabin) prepare for backward hook
  // Sink parameter: taken by value and moved into storage, avoiding an
  // extra std::function copy.
  void RegisterBackwardHooks(std::function<void()> func) {
    backward_hooks_.emplace_back(std::move(func));
  }

  void InvokeBackwardHooks() {
    for (const auto& func : backward_hooks_) {
      func();
      VLOG(5) << "Invoke Backward Hook for: " << Type() << std::endl;
    }
  }

 private:
  OpBase(size_t id, const std::string& type, const NameVarBaseMap& ins,
         const NameVarBaseMap& outs, framework::AttributeMap attrs,
         const platform::Place& place);

  OpBase(size_t id, const framework::OpDesc& op_desc,
         const platform::Place& place);

  size_t id_;

  std::unique_ptr<framework::OperatorBase> op_;

  std::vector<std::function<void()>> backward_hooks_;
  platform::Place place_;

  // Not need to be std::weak_ptr, because op is binded to a certain Tracer,
  // and would not be used by a Tracer that does not create itself.
  std::vector<OpBase*> grad_pending_ops_;

  // This part is only used for backward
  NameVarBaseMap ins_;
  NameVarBaseMap outs_;
};

}  // namespace imperative
}  // namespace paddle