/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#include "paddle/operators/recurrent_op.h"

#include <cstring>
#include <sstream>

#include "paddle/framework/op_registry.h"
#include "paddle/operators/net_op.h"

namespace paddle {
namespace operators {

using Scope = framework::Scope;
using Variable = framework::Variable;
using Tensor = framework::Tensor;
using LoDTensor = framework::LoDTensor;

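// Forward pass: split the sequence inputs into per-step slices, run the step
// net once per time step, and concatenate the per-step outputs back into
// whole sequences.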
void RecurrentAlgorithm::Run(const Scope& scope,
                             const platform::DeviceContext& dev_ctx) const {
  auto* input0 = scope.FindVar(arg_->inlinks[0]);
  PADDLE_ENFORCE_NOT_NULL(input0);
  size_t seq_len = input0->GetMutable<LoDTensor>()->dims()[0];
  PADDLE_ENFORCE_GT(seq_len, 0);

  CreateScopes(scope, seq_len);
  auto& step_scopes = GetStepScopes(scope);
  rnn::SegmentInputs(step_scopes, arg_->inlinks, seq_len);
  InitMemories(step_scopes[0]);

  for (size_t step_id = 0; step_id < seq_len; step_id++) {
    if (step_id > 0) {
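      // link this step's pre-states to the previous step's states (offset -1)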
      rnn::LinkMemories(step_scopes, arg_->states, step_id, -1);
    }
    (*stepnet_)->Run(*step_scopes[step_id], dev_ctx);
  }
  rnn::ConcatOutputs(step_scopes, arg_->outlinks, seq_len, dev_ctx);
}

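// Lazily create one scope per time step; scopes built by an earlier run with
// an equal or longer sequence are reused.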
void RecurrentAlgorithm::CreateScopes(const Scope& scope,
                                      size_t seq_len) const {
  // TODO(superjom) Only two scopes are needed for inference, this case will be
  // supported later.
  auto* step_scopes_var = scope.FindVar(arg_->step_scopes);
  PADDLE_ENFORCE(step_scopes_var != nullptr,
                 "step_scopes variable does not exist");
  auto* step_scopes = step_scopes_var->GetMutable<std::vector<Scope*>>();

  // Now all variables in scope must be created outside of the op.
  PADDLE_ENFORCE_NOT_NULL(stepnet_);
  PADDLE_ENFORCE(!(*stepnet_)->Outputs().empty(),
                 "stepnet_ op has no outputs");

  if (seq_len > step_scopes->size()) {
    for (size_t i = step_scopes->size(); i < seq_len; ++i) {
      auto& step_scope = scope.NewScope();

      // create the step net's temp inputs
      for (auto& input : (*stepnet_)->Inputs()) {
        // the weights are located in the parent scope
        for (auto& var_name : input.second) {
          if (!step_scope.FindVar(var_name)) {
            step_scope.Var(var_name)->GetMutable<LoDTensor>();
          }
        }
      }
      // create stepnet's outputs
      for (const auto& output : (*stepnet_)->Outputs()) {
        for (auto& var_name : output.second) {
          step_scope.Var(var_name);
        }
      }
      step_scopes->emplace_back(&step_scope);
    }
  }
}

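// Initialize the pre-states of step 0 from the boot memories: each pre-state
// is resized to the boot variable's shape and shares its data instead of
// copying it.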
void RecurrentAlgorithm::InitMemories(Scope* step_scope) const {
  for (auto& attr : arg_->states) {
    auto* pre_mem = step_scope->Var(attr.pre_var)->GetMutable<LoDTensor>();
    PADDLE_ENFORCE(step_scope->FindVar(attr.boot_var) != nullptr,
                   "memory [%s]'s boot variable [%s] does not exist", attr.var,
                   attr.boot_var);
    auto* boot_mem =
        step_scope->FindVar(attr.boot_var)->GetMutable<LoDTensor>();
    pre_mem->Resize(boot_mem->dims());
    PADDLE_ENFORCE_EQ(pre_mem->dims().size(), 2);
    pre_mem->ShareDataWith(*boot_mem);
  }
}

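// Maps the operator's argument roles to concrete input/output/attribute
// names; the gradient op reuses the forward names, with an @GRAD suffix where
// a gradient counterpart exists.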
const rnn::ArgumentName RecurrentOp::kArgName{
    "step_net", "step_scopes", "inputs",        "outputs",
    "states",   "ex_states",   "initial_states"};

const rnn::ArgumentName RecurrentGradientOp::kArgName{
    "step_net", "step_scopes@GRAD", "outputs@GRAD",       "inputs@GRAD",
    "states",   "ex_states",        "initial_states@GRAD"};

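// Parse the argument names declared in kArgName into arg_ and bind the
// algorithm to the shared step net.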
RecurrentOp::RecurrentOp(const std::string& type,
                         const framework::VariableNameMap& inputs,
                         const framework::VariableNameMap& outputs,
                         const framework::AttributeMap& attrs)
    : OperatorBase(type, inputs, outputs, attrs) {
  rnn::InitArgument(kArgName, &arg_, *this);
  alg_.Init(&arg_, &stepnet_);
}

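// Declares the op's interface for the registry: segmented inputs, initial
// states, concatenated outputs, step scopes, and the state-name attributes.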
class RecurrentAlgorithmProtoAndCheckerMaker
    : public framework::OpProtoAndCheckerMaker {
 public:
  RecurrentAlgorithmProtoAndCheckerMaker(framework::OpProto* proto,
                                         framework::OpAttrChecker* op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    const auto& name = RecurrentOp::kArgName;
    // inputs and outputs stored in proto
    AddInput(name.inlinks,
             "the inputs that need to be segmented for each step.")
        .AsDuplicable();
    AddInput(name.initial_states, "variables to initialize states.")
        .AsDuplicable();

    AddOutput(name.outlinks,
              "the outputs that need to be concatenated for all steps.")
        .AsDuplicable();
    AddOutput(name.step_scopes, "step scopes");

    // Attributes stored in AttributeMap
    AddAttr<std::vector<std::string>>(name.ex_states, "names of pre-states");
    AddAttr<std::vector<std::string>>(name.states, "names of states");

    AddComment("This is a recurrent group operator.");
  }
};

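// Backward pass: walk the time steps in reverse order, run the gradient step
// net per step, then propagate the step-0 state gradients back to the boot
// memories.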
void RecurrentGradientAlgorithm::Run(
    const Scope& scope, const platform::DeviceContext& dev_ctx) const {
  auto* input0 = scope.FindVar(arg_->inlinks[0]);
  PADDLE_ENFORCE_NOT_NULL(input0);
  size_t seq_len = input0->GetMutable<LoDTensor>()->dims()[0];
  auto& step_scopes = GetStepScopes(scope);
  rnn::SegmentInputs(step_scopes, arg_->inlinks, seq_len);
  for (int step_id = seq_len - 1; step_id >= 0; --step_id) {
    if (static_cast<size_t>(step_id) != seq_len - 1) {
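      // link this step's state gradients to the next step's (offset +1)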
      rnn::LinkMemories(step_scopes, arg_->states, step_id, 1);
    }
    (*stepnet_)->Run(*step_scopes[step_id], dev_ctx);
  }
  rnn::ConcatOutputs(step_scopes, arg_->outlinks, seq_len, dev_ctx);
  LinkBootMemoryGradients(step_scopes[0]);
}

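// Write the step-0 state gradients back into the boot-memory gradients by
// sharing their data.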
void RecurrentGradientAlgorithm::LinkBootMemoryGradients(
    Scope* step_scope) const {
  for (auto& attr : arg_->states) {
    PADDLE_ENFORCE(step_scope->FindVar(attr.var) != nullptr,
                   "memory variable [%s] does not exist", attr.var);
    PADDLE_ENFORCE(step_scope->FindVar(attr.boot_var) != nullptr,
                   "boot variable [%s] does not exist", attr.boot_var);
    auto* mem_grad = step_scope->Var(attr.var)->GetMutable<LoDTensor>();
    auto* boot_mem_grad =
        step_scope->Var(attr.boot_var)->GetMutable<LoDTensor>();
    boot_mem_grad->Resize(mem_grad->dims());
    boot_mem_grad->ShareDataWith(*mem_grad);
  }
}

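// Same initialization as RecurrentOp, except InitArgument is told it is
// building the gradient op's arguments (true /*is grad*/).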
RecurrentGradientOp::RecurrentGradientOp(
    const std::string& type, const framework::VariableNameMap& inputs,
    const framework::VariableNameMap& outputs,
    const framework::AttributeMap& attrs)
    : OperatorBase(type, inputs, outputs, attrs) {
  rnn::InitArgument(kArgName, &arg_, *this, true /*is grad*/);
  alg_.Init(&arg_, &stepnet_);
}

}  // namespace operators
}  // namespace paddle

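// Register the forward op together with its gradient op so the framework can
// derive recurrent_grad from recurrent.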
REGISTER_OP(recurrent, paddle::operators::RecurrentOp,
            paddle::operators::RecurrentAlgorithmProtoAndCheckerMaker,
            recurrent_grad, paddle::operators::RecurrentGradientOp);