/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#pragma once

#include "paddle/framework/operator.h"

namespace paddle {
namespace operators {

using namespace paddle::framework;

namespace rnn {

/**
 * Memory of an RNN (same as the role of `Memory` in PaddlePaddle).
 *
 * Memory attributes are cached by this op; their dims will be inferred from
 * the boot memories in the parent scope. Other attributes are copied from the
 * Op's proto attributes.
 */
struct MemoryAttr {
  // name of current state variable
  std::string var;
  // name of previous step's state variable
  std::string pre_var;
  // name of the variable used to initialize this memory (same role as
  // `boot_layer` in PaddlePaddle), which is stored in the parent scope.
  std::string boot_var;
};
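
// A minimal illustration of one memory, assuming hypothetical variable names
// (the suffixes below are examples, not mandated by this header):
//
//   rnn::MemoryAttr mem;
//   mem.var = "h";            // state written at the current step
//   mem.pre_var = "h@pre";    // the previous step's state, read at this step
//   mem.boot_var = "boot_h";  // parent-scope variable initializing step 0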

struct Link {
  // name of the input or output link.
  std::string internal;
  // alias to avoid duplicate keys in scopes.
  std::string external;
};
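
// For example, one link may be filled in as follows (the names are
// hypothetical):
//
//   rnn::Link link;
//   link.internal = "x";        // the input (or output) link's name
//   link.external = "x@alias";  // alias avoiding duplicate keys in scopes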

struct Argument {
  std::string step_net;
  std::string step_scopes;
  std::vector<Link> inlinks;
  std::vector<Link> outlinks;
  std::vector<rnn::MemoryAttr> memories;
};

struct ArgumentName {
  std::string step_net;
  std::string step_scopes;
  std::string inlinks;
  std::string outlinks;
  std::string inlink_alias;   // the alias of inlinks in step net.
  std::string outlink_alias;  // the alias of outlinks in step net.
  std::string memories;       // the memory name
  std::string pre_memories;   // the previous memory name
  std::string boot_memories;  // the boot memory name
};
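
// Note the distinction: `ArgumentName` holds the attribute *keys* under which
// names are stored in the op's proto, while `Argument` holds the resolved
// *values* for one op instance. A sketch of the intended resolution, assuming
// it is done through InitArgument (declared below):
//
//   rnn::Argument arg;
//   rnn::InitArgument(RecurrentOp::kArgName, &arg);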

/**
 * Prepare inputs for each step net.
 */
void SegmentInputs(const std::vector<Scope*>& step_scopes,
                   const std::vector<Link>& inlinks,
                   const size_t seq_len);
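
// Conceptually, each inlink tensor of shape [seq_len, batch_size, dim] is
// split along its first axis, so that step scope `t` sees the
// [batch_size, dim] slice for time step `t`. A rough sketch of the loop
// structure (illustrative only; the real implementation lives in the .cc):
//
//   for (size_t t = 0; t < seq_len; ++t) {
//     for (const auto& link : inlinks) {
//       // expose the t-th slice of the inlink inside step_scopes[t]
//     }
//   }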

/**
 * Process the outputs of the step nets and merge them into the outlink
 * variables.
 */
void ConcatOutputs(const std::vector<Scope*>& step_scopes,
                   const std::vector<Link>& outlinks,
                   const size_t seq_len);
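
// This is the inverse of SegmentInputs: the per-step [batch_size, dim]
// outputs found in the step scopes are stacked back into a single
// [seq_len, batch_size, dim] tensor per outlink.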

void LinkMemories(const std::vector<Scope*>& step_scopes,
                  const std::vector<MemoryAttr>& memories,
                  size_t step_id,
                  int offset);
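
// Connects each memory's `pre_var` in step `step_id` to the `var` of step
// `step_id + offset`. Presumably the forward pass links each step to the
// previous one, e.g. (illustrative usage only):
//
//   for (size_t t = 1; t < seq_len; ++t) {
//     rnn::LinkMemories(step_scopes, memories, t, -1);  // step t reads t-1
//   }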

void InitArgument(const ArgumentName& name, Argument* arg);

}  // namespace rnn

// The sequence format in RecurrentOp is Tensor<seq_len, batch_size, dim> for
// now.
// TODO:
// 1. No-padding computation for sequences of indefinite length in one batch.
// 2. Hierarchical RNN for sequences with sub-sequences.
// 3. Internal memory.
// 4. More complex RNN architectures, such as Gated Feedback RNN.
//    Refer to: https://arxiv.org/pdf/1502.02367.pdf
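
// For example, a batch of 4 sequences padded to length 10, each step carrying
// a 256-dimensional feature, is stored as one tensor of shape [10, 4, 256];
// time step `t` consumes the [4, 256] slice at index `t` (the concrete sizes
// are only an illustration).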

class RecurrentAlgorithm {
public:
  void Run(const Scope& scope, const platform::DeviceContext& dev_ctx) const;

  void Init(std::unique_ptr<rnn::Argument> arg) { arg_ = std::move(arg); }

  /**
   * InferShape must be called before Run.
   */
  void InferShape(const Scope& scope) const;

protected:
  /*
   * The step scopes will be stored in the parent scope as a variable.
   *
   * NOTE: the scopes are reused by both the forward and the backward pass, so
   * they are created only once and grown if more steps are needed.
   */
  void CreateScopes(const Scope& scope) const;

  const std::vector<Scope*>& GetStepScopes(const Scope& scope) const {
    return *scope.FindVar(arg_->step_scopes)->GetMutable<std::vector<Scope*>>();
  }

  void InitMemories(Scope* step_scopes) const;

private:
  std::unique_ptr<rnn::Argument> arg_;
  mutable size_t seq_len_;
};

class RecurrentGradientAlgorithm {
  /**
   * RNN's backward algorithm.
   *
   * To accelerate the development of RecurrentGradientOp, we decouple the RNN
   * algorithm from the `OperatorBase` implementation: the former contains the
   * core implementation of an RNN and stays stable even if the framework
   * changes a lot, while the latter is a wrapper that acts as an adapter so
   * that the RNN becomes an operator.
   */
public:
  void Init(std::unique_ptr<rnn::Argument> arg) { arg_ = std::move(arg); }

  void Run(const Scope& scope, const platform::DeviceContext& dev_ctx) const;

  void LinkBootMemoryGradients(Scope* step_scopes) const;

  /**
   * InferShape must be called before Run.
   */
  void InferShape(const Scope& scope) const;

protected:
  inline const std::vector<Scope*>& GetStepScopes(const Scope& scope) const {
    return *scope.FindVar(arg_->step_scopes)->GetMutable<std::vector<Scope*>>();
  }

private:
  std::unique_ptr<rnn::Argument> arg_;
  mutable size_t seq_len_;
};

class RecurrentOp final : public OperatorBase {
public:
  void Init() override;

  /**
   * InferShape must be called before Run.
   */
  void InferShape(const Scope& scope) const override {
    alg_.InferShape(scope);
  }

  void Run(const Scope& scope,
           const platform::DeviceContext& dev_ctx) const override {
    alg_.Run(scope, dev_ctx);
  }

  static const rnn::ArgumentName kArgName;

private:
  RecurrentAlgorithm alg_;
};
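
// A minimal sketch of the intended call order (`scope` and `dev_ctx` are
// assumed to be prepared by the caller, and Init() is presumably invoked once
// the op's proto attributes have been filled in):
//
//   RecurrentOp op;
//   op.Init();              // parses attributes into the rnn::Argument
//   op.InferShape(scope);   // must precede Run, per the comment above
//   op.Run(scope, dev_ctx);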

class RecurrentGradientOp final : public OperatorBase {
public:
  void Init() override;

  /**
   * InferShape must be called before Run.
   */
  void InferShape(const Scope& scope) const override {
    alg_.InferShape(scope);
  }

  void Run(const Scope& scope,
           const platform::DeviceContext& dev_ctx) const override {
    alg_.Run(scope, dev_ctx);
  }

  static const rnn::ArgumentName kArgName;

private:
  RecurrentGradientAlgorithm alg_;
};

}  // namespace operators
}  // namespace paddle