// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <map>
#include <string>
#include <unordered_map>
#include <vector>

#include "paddle/fluid/framework/op_desc.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/var_desc.h"
#include "paddle/fluid/platform/enforce.h"

namespace paddle {
namespace imperative {

class OpBase;

class VarBase {
 public:
  VarBase()
      : pre_op_(nullptr),
        pre_op_out_idx_(-1),
        var_desc_(nullptr),
X
Xin Pan 已提交
36 37 38 39 40 41 42 43 44 45 46 47 48
        var_(new framework::Variable()),
        grads_(new framework::Variable()) {}

  virtual ~VarBase() {
    if (var_) {
      delete var_;
      var_ = nullptr;
    }
    if (grads_) {
      delete grads_;
      grads_ = nullptr;
    }
  }
49

X
Xin Pan 已提交
50
  void RunBackward();
51 52 53 54

  framework::LoDTensor& Grad();

  OpBase* pre_op_;
X
Xin Pan 已提交
55
  std::string pre_op_out_name_;
56 57 58 59 60 61 62 63 64 65
  int pre_op_out_idx_;

  framework::VarDesc* var_desc_;
  framework::Variable* var_;
  framework::Variable* grads_;
};

class OpBase {
 public:
  OpBase()
X
Xin Pan 已提交
66 67
      : pre_ops_(new std::map<std::string, std::vector<OpBase*>>()),
        pre_ops_out_idx_(new std::map<std::string, std::vector<int>>()),
68 69 70 71 72 73 74 75 76 77 78
        op_desc_(nullptr),
        grad_op_desc_(nullptr) {}

  virtual ~OpBase() {
    delete pre_ops_;
    delete pre_ops_out_idx_;

    if (grad_op_desc_) delete grad_op_desc_;
    if (grad_to_var_) delete grad_to_var_;
  }

X
Xin Pan 已提交
79
  std::map<std::string, std::vector<VarBase*>> ApplyGrad();
80

X
Xin Pan 已提交
81 82 83 84
  std::map<std::string, std::vector<VarBase*>> input_vars_;
  std::map<std::string, std::vector<VarBase*>> output_vars_;
  std::map<std::string, std::vector<OpBase*>>* pre_ops_;
  std::map<std::string, std::vector<int>>* pre_ops_out_idx_;
85 86 87 88
  framework::OpDesc* op_desc_;

  framework::OpDesc* grad_op_desc_;
  std::unordered_map<std::string, std::string>* grad_to_var_;
X
Xin Pan 已提交
89 90
  std::map<std::string, std::vector<framework::Variable*>> grad_input_vars_;
  std::map<std::string, std::vector<framework::Variable*>> grad_output_vars_;
91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107
  framework::BlockDesc* block_;
};

// Base class for user-defined imperative layers. Subclasses override
// Forward() (and optionally Backward()) to define computation.
class Layer {
 public:
  virtual ~Layer() {}

  // Default Forward produces no outputs; `inputs` is intentionally unused.
  virtual std::vector<VarBase> Forward(const std::vector<VarBase>& inputs) {
    return {};
  }

  // Default Backward only reports that customization is not yet supported.
  virtual void Backward() { LOG(ERROR) << "To support customize"; }
};

}  // namespace imperative
}  // namespace paddle