/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/recurrent_op.h"

#include <algorithm>
#include "paddle/fluid/string/string_helper.h"

namespace paddle {
namespace operators {

using StepScopeVar = std::vector<framework::Scope *>;

const char RecurrentBase::kInputs[] = "inputs";
const char RecurrentBase::kInitialStates[] = "initial_states";
const char RecurrentBase::kParameters[] = "parameters";
const char RecurrentBase::kOutputs[] = "outputs";
const char RecurrentBase::kStepScopes[] = "step_scopes";
const char RecurrentBase::kHasStates[] = "has_states";
const char RecurrentBase::kExStates[] = "ex_states";
const char RecurrentBase::kStates[] = "states";
const char RecurrentBase::kStepBlock[] = "sub_block";
const char RecurrentBase::kReverse[] = "reverse";
const char RecurrentBase::kIsTrain[] = "is_train";
const char RecurrentBase::kSkipEagerDeletionVars[] = "skip_eager_deletion_vars";
#define GRAD_SUFFIX "@GRAD"
const char RecurrentBase::kInputGrads[] = "inputs" GRAD_SUFFIX;
const char RecurrentBase::kOutputGrads[] = "outputs" GRAD_SUFFIX;
const char RecurrentBase::kParamGrads[] = "parameters" GRAD_SUFFIX;
const char RecurrentBase::kInitStateGrads[] = "initial_states" GRAD_SUFFIX;

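// Drop all child step scopes from the parent scope. The device is
// synchronized first so that in-flight kernels cannot touch tensors owned by
// a scope that is about to be destroyed.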
static void ClearStepScopes(const platform::DeviceContext &dev_ctx,
                            framework::Scope *parent_scope,
                            StepScopeVar *step_scopes) {
  if (step_scopes->empty()) return;

  dev_ctx.Wait();

  for (auto *sub_scope : *step_scopes) {
    if (parent_scope->HasKid(sub_scope)) {
      parent_scope->DeleteScope(sub_scope);
    }
  }

  step_scopes->clear();
}

StepScopes::StepScopes(const platform::DeviceContext &dev_ctx,
                       const framework::Scope &parent, StepScopeVar *scopes,
                       bool is_train, size_t seq_len, bool is_backward)
    : counter_(is_backward ? seq_len - 1 : 0UL),
      scopes_(scopes),
      is_train_(is_train),
      is_backward_(is_backward) {
  size_t num_step_scopes = is_train ? seq_len : 2;
  PADDLE_ENFORCE_EQ(is_train || !is_backward, true,
                    platform::errors::PreconditionNotMet(
                        "Cannot backward when is not training"));
  if (!is_backward_) {
    ClearStepScopes(dev_ctx, const_cast<framework::Scope *>(&parent), scopes);
    scopes->reserve(static_cast<size_t>(num_step_scopes));
    for (size_t i = 0; i < num_step_scopes; ++i) {
      scopes->emplace_back(&parent.NewScope());
    }
  }
}

framework::Scope &StepScopes::CurScope() { return GetScope(counter_); }

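// Scope of the previous time step. The forward pass visits scopes from 0 to
// seq_len - 1 and the backward pass from seq_len - 1 down to 0, so the
// previous step sits at counter_ - 1 and counter_ + 1 respectively.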
framework::Scope &StepScopes::ExScope() {
  auto &scope = GetScope(is_backward_ ? counter_ + 1 : counter_ - 1);
  return scope;
}

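// Step the backward pass one scope back, eagerly deleting the trailing
// forward scope that has just been consumed so its memory is released early.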
void StepScopes::BackwardNext(const platform::DeviceContext &dev_ctx,
                              framework::Scope *parent_scope) {
  PADDLE_ENFORCE_EQ(is_backward_, true,
                    platform::errors::PreconditionNotMet(
                        "Cannot get backward next scope when is forward"));
  if (counter_ + 2 == scopes_->size()) {
    parent_scope->DeleteScope((*scopes_)[counter_ + 1]);
    scopes_->pop_back();
    VLOG(3) << "Deleted scope at " << counter_ + 1;
  }
  --counter_;
}

void StepScopes::ForwardNext() {
  PADDLE_ENFORCE_EQ(is_backward_, false,
                    platform::errors::PreconditionNotMet(
                        "Cannot get forward next scope when is backward"));
  ++counter_;
}

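// In inference (is_train_ == false) only two step scopes are allocated and
// reused alternately (see the constructor), hence the modulo; training keeps
// one scope per time step so the backward pass can revisit them.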
framework::Scope &StepScopes::GetScope(size_t scope_id) const {
  if (!is_train_) {
    scope_id %= 2;
  }
  PADDLE_ENFORCE_LT(
      scope_id, scopes_->size(),
      platform::errors::InvalidArgument(
          "Input scope_id is greater than scopes size in RecurrentOp"));
  return *(*scopes_)[scope_id];
}

RecurrentBase::RecurrentBase(const std::string &type,
                             const framework::VariableNameMap &inputs,
                             const framework::VariableNameMap &outputs,
                             const framework::AttributeMap &attrs)
    : OperatorBase(type, inputs, outputs, attrs) {}

// Get the sequence length from the scope.
//   The sequence length is taken from the input tensors, whose dimensions
//   must be [SEQ_LEN, ..., ...]. The first dimension is SEQ_LEN; the second
//   may be the batch size or a nested sequence length.
int64_t RecurrentBase::GetSequenceLength(const framework::Scope &scope) const {
  // Dim format SEQ_LEN, BATCH_SIZE, ...
  int64_t seq_len = -1;
  auto &all_inputs = Inputs(kInputs);
  PADDLE_ENFORCE_EQ(
      all_inputs.empty(), false,
      platform::errors::InvalidArgument("RecurrentOp gets empty input"));
  for (auto &iname : all_inputs) {
    auto *var = scope.FindVar(iname);
    PADDLE_ENFORCE_NOT_NULL(var,
                            platform::errors::InvalidArgument(
                                "RecurrentOp finds var %s is NULL", iname));
    PADDLE_ENFORCE_EQ(var->IsType<framework::LoDTensor>(), true,
                      platform::errors::InvalidArgument(
                          "RecurrentOp only accepts LoDTensor as input but "
                          "input var %s is not LoDTensor",
                          iname));
    auto &dim = var->Get<framework::LoDTensor>().dims();
    if (seq_len == -1) {
      seq_len = dim[0];
    } else {
      PADDLE_ENFORCE_EQ(seq_len, dim[0],
                        platform::errors::InvalidArgument(
                            "Sequence length of input %s in RecurrentOp is NOT "
                            "equal to sequence length of previous input",
                            iname));
    }
  }
  PADDLE_ENFORCE_GE(seq_len, 0,
                    platform::errors::InvalidArgument(
                        "RecurrentOp gets invalid sequence length."));
  return seq_len;
}

// for src_tensor, dst_tensor in zip(map(src_scope.FindVar, src_vars),
//                                   map(dst_scope.Var, dst_vars)):
//   dst_tensor.ShareDataWith(src_tensor)
void RecurrentBase::LinkTensor(const framework::Scope &src_scope,
                               const std::vector<std::string> &src_vars,
                               framework::Scope *dst_scope,
                               const std::vector<std::string> &dst_vars) {
  LinkTensorWithCallback(
      src_scope, src_vars, dst_scope, dst_vars,
      [&](const framework::Tensor &src, framework::Tensor *dst) {
        dst->ShareDataWith(src);
      });
}

// (seq_len, shape) -> return [seq_len] + list(shape)
framework::DDim RecurrentBase::PrependDims(size_t seq_len,
                                           const framework::DDim &src) {
  auto dims = framework::vectorize(src);
  dims.insert(dims.begin(), static_cast<int64_t>(seq_len));
  return framework::make_ddim(dims);
}

RecurrentOp::RecurrentOp(const std::string &type,
                         const framework::VariableNameMap &inputs,
                         const framework::VariableNameMap &outputs,
                         const framework::AttributeMap &attrs)
    : RecurrentBase(type, inputs, outputs, attrs) {}

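// Forward pass: for each time step, slice the step inputs out of the sequence
// inputs, link the states from the previous step scope, run the step block,
// and write the step outputs back into the sequence outputs.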
void RecurrentOp::RunImpl(const framework::Scope &scope,
                          const platform::Place &place) const {
  bool has_state = Attr<bool>(kHasStates);
  auto seq_len = static_cast<size_t>(this->GetSequenceLength(scope));

  // get device context from pool
  platform::DeviceContextPool &pool = platform::DeviceContextPool::Instance();
  auto &dev_ctx = *pool.Get(place);

  VLOG(3) << "Static RNN input sequence length = " << seq_len;
  auto reverse = Attr<bool>(kReverse);

  framework::Executor executor(place);
  auto *block = Attr<framework::BlockDesc *>(kStepBlock);

  auto *program = block->Program();
  auto ctx = executor.Prepare(
      *program, block->ID(), Attr<std::vector<std::string>>(
                                 kSkipEagerDeletionVars) /*skip_ref_cnt_vars*/);

  // TODO(gfwm2013): CreateStepScopes can trigger a segmentation fault when
  // the eval process runs multithreaded, so it is guarded with a mutex to
  // keep the computation correct. A proper fix will land in a later pull
  // request.
  static std::mutex mutex;
  std::lock_guard<std::mutex> lock(mutex);
  StepScopes scopes = CreateStepScopes(dev_ctx, scope, seq_len);
  for (size_t i = 0; i < seq_len; ++i) {
    size_t seq_offset = reverse ? seq_len - i - 1 : i;
    VLOG(3) << "Recurrent operate at the time step " << seq_offset;

    auto &cur_scope = scopes.CurScope();

    // Link outside::input --> inside::input
    //   inside::input = outside::input[seq_offset: seq_offset+1]
    LinkTensorWithCallback(
        scope, Inputs(kInputs), &cur_scope, Inputs(kInputs),
        [&seq_offset](const framework::Tensor &outside,
                      framework::Tensor *inside) {
          inside->ShareDataWith(outside.Slice(seq_offset, seq_offset + 1));
          auto dims = framework::vectorize(inside->dims());
          dims.erase(dims.begin());
          inside->Resize(framework::make_ddim(dims));
        });

    if (has_state) {
      if (i == 0) {
        // Link initial states  --> ex_states
        LinkTensor(scope, Inputs(kInitialStates), &cur_scope,
                   Attr<std::vector<std::string>>(kExStates));
      } else {
        auto &ex_scope = scopes.ExScope();
        // Link ex_scope::state --> cur_scope::ex_state
        LinkTensor(ex_scope, Attr<std::vector<std::string>>(kStates),
                   &cur_scope, Attr<std::vector<std::string>>(kExStates));
      }
    }

    // Link inside::output -> outside::output
    //   outside::output[seq_offset: seq_offset + 1] = inside::output
    executor.CreateVariables(ctx->prog_, &cur_scope, ctx->block_id_);
    if (i > 0) {
      LinkTensorWithCallback(scope, Outputs(kOutputs), cur_scope,
                             Outputs(kOutputs),
                             [&](const framework::LoDTensor &src_tensor,
                                 framework::LoDTensor *dst_tensor) {
                               framework::Tensor src_slice =
                                   src_tensor.Slice(seq_offset, seq_offset + 1);
                               dst_tensor->ShareDataWith(src_slice);
                             });
    }

    // Linked now, execute!
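    // keep_kids must be true: the kid scopes belong to the StepScopes
    // bookkeeping above and are reused by later steps (and, in training,
    // by the backward pass), so the executor must not delete them.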
    executor.RunPreparedContext(ctx.get(), &cur_scope,
                                false /*create_local_scope*/,
                                false /*create_vars*/, true /* keep_kids */);
    if (i == 0) {
      LinkTensorWithCallback(
          cur_scope, Outputs(kOutputs), scope, Outputs(kOutputs),
          [&](const framework::LoDTensor &src_tensor,
              framework::LoDTensor *dst_tensor) {
            // create output tensor at begin
            dst_tensor->Resize(PrependDims(seq_len, src_tensor.dims()));
            dst_tensor->mutable_data(place, src_tensor.type());

            auto dst_out = dst_tensor->Slice(seq_offset, seq_offset + 1);
            // Explicit copy output since the local RNN scope can be destroyed
            // early.
            framework::TensorCopy(src_tensor, place, dev_ctx, &dst_out);
          });
    }

    scopes.ForwardNext();
  }
}

StepScopes RecurrentOp::CreateStepScopes(const platform::DeviceContext &dev_ctx,
                                         const framework::Scope &scope,
                                         size_t seq_len) const {
  auto *var = scope.FindVar(Output(kStepScopes));
  PADDLE_ENFORCE_NOT_NULL(var, platform::errors::InvalidArgument(
                                   "RecurrentOp gets empty StepScopes var"));
  return StepScopes(dev_ctx, scope, var->GetMutable<StepScopeVar>(),
                    Attr<bool>(kIsTrain), seq_len);
}

RecurrentGradOp::RecurrentGradOp(const std::string &type,
                                 const framework::VariableNameMap &inputs,
                                 const framework::VariableNameMap &outputs,
                                 const framework::AttributeMap &attrs)
    : RecurrentBase(type, inputs, outputs, attrs) {}

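// Backward pass: walk the time steps in the reverse order of the forward
// pass, link the sliced output gradients into each step scope, run the
// backward step block, accumulate parameter gradients across steps, and copy
// the input and initial-state gradients back to the outer scope.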
void RecurrentGradOp::RunImpl(const framework::Scope &scope,
                              const platform::Place &place) const {
  bool has_state = Attr<bool>(kHasStates);
  const size_t seq_len = static_cast<size_t>(GetSequenceLength(scope));

  // get device context from pool
  platform::DeviceContextPool &pool = platform::DeviceContextPool::Instance();
  auto &dev_ctx = *pool.Get(place);

  StepScopes scopes = CreateStepScopes(dev_ctx, scope, seq_len);
  auto reverse = Attr<bool>(kReverse);

  framework::Executor executor(place);
  auto *block = Attr<framework::BlockDesc *>(kStepBlock);
  auto *program = block->Program();
  auto ctx = executor.Prepare(
      *program, block->ID(), Attr<std::vector<std::string>>(
                                 kSkipEagerDeletionVars) /*skip_ref_cnt_vars*/);

  for (size_t step_id = 0; step_id < seq_len; ++step_id) {
    size_t seq_offset = reverse ? step_id : seq_len - step_id - 1;
    VLOG(3) << "Recurrent backward operate at the time step " << seq_offset;
    auto &cur_scope = scopes.CurScope();

    // Link outside::output_grads --> inside::output_grads
    //   inside::output_grad = outside::output_grad[seq_offset:seq_offset+1]
    LinkTensorWithCallback(
        scope, Inputs(kOutputGrads), &cur_scope, Inputs(kOutputGrads),
        [&](const framework::Tensor &outside, framework::Tensor *inside) {
          inside->ShareDataWith(outside.Slice(seq_offset, seq_offset + 1));
          auto dims = framework::vectorize(inside->dims());
          dims.erase(dims.begin());
          inside->Resize(framework::make_ddim(dims));
        },
        true /*is_backward*/);
    auto og_set = List2Set(Inputs(kOutputGrads));

    if (VLOG_IS_ON(10)) {
      std::ostringstream sout;
      std::copy(og_set.begin(), og_set.end(),
                std::ostream_iterator<std::string>(sout, ","));
      VLOG(10) << " RNN output gradients = [" << sout.str() << "]";
    }

    if (has_state) {
      // Link states
      //   if cur_scope::cur_state_grad in out_grads:
      //     cur_scope::cur_state_grad += ex_scope::ex_state_grad
      //   else:
      //     ex_scope::ex_state_grad --> cur_scope::cur_state_grad
      if (step_id != 0) {  // not at beginning
        auto &ex_scope = scopes.ExScope();
        auto ex_state_grads =
            GradVarLists(Attr<std::vector<std::string>>(kExStates));
        auto cur_state_grads =
            GradVarLists(Attr<std::vector<std::string>>(kStates));

        PADDLE_ENFORCE_EQ(ex_state_grads.size(), cur_state_grads.size(),
                          platform::errors::InvalidArgument(
                              "lengths of ex_states and cur_states are not "
                              "equal in RecurrentGradOp"));
        for (size_t i = 0; i < ex_state_grads.size(); ++i) {
          auto &cur_grad = cur_state_grads[i];
          auto &ex_grad = ex_state_grads[i];
          auto &ex_grad_tensor =
              ex_scope.FindVar(ex_grad)->Get<framework::LoDTensor>();

          VLOG(10) << " RNN link " << cur_grad << " from " << ex_grad;
          auto *cur_grad_var = cur_scope.Var(cur_grad);
          framework::LoDTensor *cur_grad_tensor =
              cur_grad_var->GetMutable<framework::LoDTensor>();
          cur_grad_tensor->ShareDataWith(ex_grad_tensor);
        }
      }
    }

    // Link outside::input_grad --> inside::input_grad
    //   inside::input_grad = outside::input_grad[seq_offset: seq_offset + 1]
    executor.CreateVariables(ctx->prog_, &cur_scope, ctx->block_id_);
    if (step_id > 0) {
      LinkTensorWithCallback(scope, Outputs(kInputGrads), cur_scope,
                             GradVarLists(Inputs(kInputs)),
                             [&](const framework::LoDTensor &src_tensor,
                                 framework::LoDTensor *dst_tensor) {
                               if (src_tensor.memory_size() ==
                                   0) {  // Inside Gradient is not created.
                                 return;
                               }
                               framework::Tensor src_slice =
                                   src_tensor.Slice(seq_offset, seq_offset + 1);
                               dst_tensor->ShareDataWith(src_slice);
                             },
                             true /*is_backward*/);
    }

    VLOG(5) << "Recurrent memory linking finished ";
    // Run step block with cur_scope
    executor.RunPreparedContext(ctx.get(), &cur_scope,
                                false /*create_local_scope*/,
                                false /*create_vars*/, true /* keep_kids */);

    VLOG(5) << "executor.Run finished ";

    auto local_var_names = LocalVarNames(cur_scope);

    // Accumulate params
    //   if (step == 0):
    //      outside::param_grad = 0.0
    //   outside::param_grad += inside::param_grad
    {
      auto &pg_names = Outputs(kParamGrads);
      auto &p_names = Inputs(kParameters);
      PADDLE_ENFORCE_EQ(pg_names.size(), p_names.size(),
                        platform::errors::InvalidArgument(
                            "Sizes of Parameters and ParamGrads are not equal "
                            "in RecurrentGradOp"));

      for (size_t param_id = 0; param_id < pg_names.size(); ++param_id) {
        auto inside_grad_name = framework::GradVarName(p_names[param_id]);

        // If the gradient of this parameter is not computed inside the RNN,
        // skip it.
        if (local_var_names.find(inside_grad_name) == local_var_names.end()) {
          continue;
        }

        // zero gradient variable in step 0
        if (step_id == 0) {
          auto &inside_tensor =
              cur_scope.FindVar(inside_grad_name)->Get<framework::LoDTensor>();
          framework::AttributeMap attrs;
          attrs["dtype"] = inside_tensor.type();
          attrs["shape"] = framework::vectorize<int>(inside_tensor.dims());
          attrs["value"] = 0.0f;

          auto zero_op = framework::OpRegistry::CreateOp(
              "fill_constant", framework::VariableNameMap{},
              {{"Out", {pg_names[param_id]}}}, attrs);
          zero_op->Run(scope, place);
        }

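        // Temporarily rename the inside gradient so that "sum" sees the
        // outside accumulator and this step's gradient as two distinct
        // inputs; the original name is restored right after the sum.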
        auto new_inside_name = cur_scope.Rename(inside_grad_name);

        // sum gradient
        auto sum_op = framework::OpRegistry::CreateOp(
            "sum", {{"X", {pg_names[param_id], new_inside_name}}},
            {{"Out", {pg_names[param_id]}}},
            framework::AttributeMap{{"use_mkldnn", {false}}});
        sum_op->Run(cur_scope, place);

        cur_scope.Rename(new_inside_name, inside_grad_name);
      }
    }
    VLOG(5) << "Accumulate Parameter finished ";

    // Copy input gradient from inside to outside
    //   outside::input_grad[seq_offset: seq_offset + 1] = inside::input_grad
    if (step_id == 0) {
      LinkTensorWithCallback(
          cur_scope, GradVarLists(Inputs(kInputs)), scope, Outputs(kInputGrads),
          [&](const framework::LoDTensor &inside,
              framework::LoDTensor *outside) {
            if (inside.memory_size() == 0) {  // IG is not created.
              return;
            }
            // Alloc outside memory
            outside->Resize(PrependDims(seq_len, inside.dims()));
            outside->mutable_data(place, inside.type());

            auto dst = outside->Slice(seq_offset, seq_offset + 1);
            framework::TensorCopy(inside, place, dev_ctx, &dst);
          },
          true /*is_backward*/);
    }
    VLOG(5) << "Link outside gradient finished ";

    if (has_state) {
      if (step_id + 1 == seq_len) {  // at_end
        // copy initialize states gradient from inside to outside
        LinkTensorWithCallback(
            cur_scope, GradVarLists(Attr<std::vector<std::string>>(kExStates)),
            scope, Outputs(kInitStateGrads),
            [&](const framework::LoDTensor &inside,
                framework::LoDTensor *outside) {
              outside->Resize(inside.dims());
              outside->mutable_data(place, inside.type());
              framework::TensorCopy(inside, place, dev_ctx, outside);
            },
            true /*is_backward*/);
        VLOG(5) << "Link initialize state gradient finished ";
      }
    }
    scopes.BackwardNext(dev_ctx, const_cast<framework::Scope *>(&scope));
  }
  // Delete the scope of StepScopes
  auto *var = scope.FindVar(Input(kStepScopes));
  PADDLE_ENFORCE_NOT_NULL(var,
                          platform::errors::InvalidArgument(
                              "StepScopes var is empty in RecurrentGradOp"));
  auto *step_scopes = var->GetMutable<StepScopeVar>();
  ClearStepScopes(dev_ctx, const_cast<framework::Scope *>(&scope), step_scopes);
}

StepScopes RecurrentGradOp::CreateStepScopes(
    const platform::DeviceContext &dev_ctx, const framework::Scope &scope,
    size_t seq_len) const {
  auto *var = scope.FindVar(Input(kStepScopes));
  PADDLE_ENFORCE_NOT_NULL(var,
                          platform::errors::InvalidArgument(
                              "StepScopes var is empty in RecurrentGradOp"));
  return StepScopes(dev_ctx, scope, var->GetMutable<StepScopeVar>(),
                    Attr<bool>(kIsTrain), seq_len, true /*is_backward*/);
}

std::unordered_set<std::string> RecurrentGradOp::List2Set(
    const std::vector<std::string> &list) const {
  std::unordered_set<std::string> local_var_name_set;
  local_var_name_set.reserve(list.size());
  for (auto &each : list) {
    local_var_name_set.insert(each);
  }
  return local_var_name_set;
}

std::unordered_set<std::string> RecurrentGradOp::LocalVarNames(
    const framework::Scope &scope) const {
  return this->List2Set(scope.LocalVarNames());
}

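// Map variable names to their gradient names, e.g. {"x", "h"} becomes
// {"x@GRAD", "h@GRAD"}.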
std::vector<std::string> RecurrentGradOp::GradVarLists(
    const std::vector<std::string> &var_names) {
  std::vector<std::string> retv;
  retv.reserve(var_names.size());
  std::transform(var_names.begin(), var_names.end(), std::back_inserter(retv),
                 framework::GradVarName);
  return retv;
}

class RecurrentOpProtoMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput(RecurrentBase::kInputs, "rnn inputs").AsDuplicable();
    AddInput(RecurrentBase::kInitialStates, "rnn initial states")
        .AsDuplicable();
    AddInput(RecurrentBase::kParameters,
             "Parameters are used by the step block as its input. However, "
             "the input is not a sequence tensor. At every time step, each "
             "operator in the step block just uses the parameter directly.")
        .AsDuplicable();
    AddOutput(RecurrentBase::kOutputs,
              "The output sequence of RNN. The sequence length must be the "
              "same.")
        .AsDuplicable();
    AddOutput(RecurrentBase::kStepScopes,
              "StepScopes contain all local variables in each time step.");
    AddAttr<bool>(RecurrentBase::kHasStates, "Whether the RNN has states.")
        .SetDefault(false);
    AddAttr<std::vector<std::string>>(
        RecurrentBase::kExStates,
        string::Sprintf(
            R"DOC(The ex-state variable names.
The ex-state means the state value in the ex-timestep, i.e. the previous time
step. [%s, %s, %s] must be in the same order.)DOC",
            RecurrentBase::kExStates, RecurrentBase::kStates,
            RecurrentBase::kInitStateGrads));
    AddAttr<std::vector<std::string>>(
        RecurrentBase::kStates,
        string::Sprintf(
            "The state variable names. [%s, %s, %s] must be the same order",
            RecurrentBase::kExStates, RecurrentBase::kStates,
            RecurrentBase::kInitStateGrads));
    AddAttr<framework::BlockDesc *>(RecurrentBase::kStepBlock,
                                    "The step block inside RNN");
    AddAttr<bool>(RecurrentBase::kReverse, R"DOC(Calculate RNN reversely or not.
By default reverse=False

Assume the input data is [A, B, C, D]

if reverse is False:
  the computation of RNN is like
      A          B          C         D
      |          |          |         |
      v          v          v         v
     rnn -----> rnn -----> rnn ----> rnn
      |          |          |         |
      v          v          v         v
      o          o          o         o

if reverse is True
  the computation of RNN is like
      A          B          C         D
      |          |          |         |
      v          v          v         v
     rnn <----- rnn <----- rnn <---- rnn
      |          |          |         |
      v          v          v         v
      o          o          o         o
)DOC").SetDefault(false);
    AddAttr<bool>(RecurrentBase::kIsTrain, "").SetDefault(true);
    AddAttr<std::vector<std::string>>(RecurrentBase::kSkipEagerDeletionVars,
                                      "Vars that would skip eager deletion."
                                      "Users should not set this manually.")
        .SetDefault(std::vector<std::string>());

    AddComment(R"DOC(
Static Length Recurrent Operator.

The static length recurrent operator can only operate on fixed size sequence
data, i.e. in each mini-batch, the sequence length of all inputs is the same.

)DOC");
  }
};

template <typename T>
class RecurrentGradOpMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  void Apply(GradOpPtr<T> grad) const override {
    grad->SetType("recurrent_grad");
    for (auto &input_param : this->InputNames()) {
      grad->SetInput(input_param, this->Input(input_param));
      grad->SetOutput(framework::GradVarName(input_param),
                      this->InputGrad(input_param, false));
    }

    for (auto &output_param : this->OutputNames()) {
      if (output_param == RecurrentBase::kStepScopes) {
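        // Step scopes carry no numeric gradient; the forward scopes
        // themselves are passed in so the backward op can revisit each
        // time step.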
        grad->SetInput(output_param, this->Output(output_param));
        grad->SetInput(framework::GradVarName(output_param),
                       this->Output(output_param));
      } else {
        grad->SetInput(output_param, this->Output(output_param));
        grad->SetInput(framework::GradVarName(output_param),
                       this->OutputGrad(output_param));
      }
    }
    grad->SetAttrMap(this->Attrs());
    grad->SetBlockAttr(RecurrentBase::kStepBlock, this->grad_block_[0]);
  }
};

class RecurrentGradOpShapeInference : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext *ctx) const override {
    std::vector<std::string> output{RecurrentBase::kOutputs};

    // In some cases kInitialStates is empty.
    // If kInitialStates is empty, all the states must be empty as well.
    if (!ctx->HasInputs(RecurrentBase::kInitialStates)) {
      PADDLE_ENFORCE_EQ(
          ctx->Attrs()
              .Get<std::vector<std::string>>(RecurrentBase::kExStates)
              .size(),
          0, platform::errors::InvalidArgument("The Attr(%s) should be empty.",
                                               RecurrentBase::kExStates));
      PADDLE_ENFORCE_EQ(
          ctx->Attrs()
              .Get<std::vector<std::string>>(RecurrentBase::kStates)
              .size(),
          0, platform::errors::InvalidArgument("The Attr(%s) should be empty.",
                                               RecurrentBase::kStates));
    }

    PADDLE_ENFORCE_EQ(
        ctx->HasInputs(RecurrentBase::kInputs), true,
        platform::errors::InvalidArgument("The input(%s) should not be empty.",
                                          RecurrentBase::kInputs));
    PADDLE_ENFORCE_EQ(
        ctx->HasInputs(RecurrentBase::kOutputs), true,
        platform::errors::InvalidArgument("The input(%s) should not be empty.",
                                          RecurrentBase::kOutputs));

    // In some cases kInitialStates is empty.
    if (ctx->HasInputs(RecurrentBase::kInitialStates) &&
        ctx->HasOutputs(
            framework::GradVarName(RecurrentBase::kInitialStates))) {
      ctx->SetOutputsDim(framework::GradVarName(RecurrentBase::kInitialStates),
                         ctx->GetInputsDim(RecurrentBase::kInitialStates));
    }

    PADDLE_ENFORCE_EQ(
        ctx->HasOutputs(framework::GradVarName(RecurrentBase::kInputs)), true,
        platform::errors::InvalidArgument(
            "The output of(%s) should not be empty.",
            framework::GradVarName(RecurrentBase::kInputs)));
    ctx->SetOutputsDim(framework::GradVarName(RecurrentBase::kInputs),
                       ctx->GetInputsDim(RecurrentBase::kInputs));

    // In some cases kParameters is empty.
    if (ctx->HasInputs(RecurrentBase::kParameters)) {
      PADDLE_ENFORCE_EQ(
          ctx->HasOutputs(framework::GradVarName(RecurrentBase::kParameters)),
          true, platform::errors::InvalidArgument(
                    "The output of(%s) should not be empty.",
                    framework::GradVarName(RecurrentBase::kParameters)));
      ctx->SetOutputsDim(framework::GradVarName(RecurrentBase::kParameters),
                         ctx->GetInputsDim(RecurrentBase::kParameters));
    }
  }
};

}  // namespace operators
}  // namespace paddle

REGISTER_OPERATOR(
    recurrent, paddle::operators::RecurrentOp,
    paddle::operators::RecurrentOpProtoMaker,
    paddle::operators::RecurrentGradOpMaker<paddle::framework::OpDesc>);
REGISTER_OPERATOR(recurrent_grad, paddle::operators::RecurrentGradOp,
                  paddle::operators::RecurrentGradOpShapeInference);