/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#include "paddle/operators/rnn/recurrent_op_utils.h"

namespace paddle {
namespace operators {
namespace rnn {

namespace f = paddle::framework;

using Tensor = framework::Tensor;
using LoDTensor = framework::LoDTensor;

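// Splits every external input LoDTensor (shape [seq_len, ...]) along the
// time dimension and places one per-step slice, under the link's internal
// name, into each step scope. In infer-shape mode only shapes are set and
// no data is sliced.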
void SegmentInputs(const std::vector<Scope*>& step_scopes,
                   const std::vector<Link>& inlinks, const size_t seq_len,
                   bool infer_shape_mode) {
  PADDLE_ENFORCE(!inlinks.empty(), "no inlinks are provided.");
  for (size_t i = 0; i < inlinks.size(); ++i) {
    auto input_var = step_scopes[0]->FindVar(inlinks[i].external);
    PADDLE_ENFORCE(input_var != nullptr, "input link [%s] is not in scope.",
                   inlinks[i].external);

    LoDTensor* input = input_var->GetMutable<LoDTensor>();
    f::DDim dims = input->dims();
    PADDLE_ENFORCE(static_cast<size_t>(dims[0]) == seq_len,
                   "all the inlinks must have the same length");
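    // The per-step shape drops the leading sequence dimension.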
    f::DDim step_dims = slice_ddim(dims, 1, dims.size());
    for (size_t j = 0; j < seq_len; j++) {
      Tensor* step_input =
          step_scopes[j]->NewVar(inlinks[i].internal)->GetMutable<Tensor>();
      if (!infer_shape_mode) {
        // The input of operators of each step is Tensor here.
        // Maybe need to modify Slice function.
        *step_input = input->Slice<float>(j, j + 1);
      }
      step_input->Resize(step_dims);
    }
  }
}

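// Gathers the per-step output tensors from the step scopes and concatenates
// them along the time dimension into the external output LoDTensor. In
// infer-shape mode only the output's shape ([seq_len, step dims...]) is
// inferred; otherwise each step's result is copied into its slice.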
void ConcatOutputs(const std::vector<Scope*>& step_scopes,
                   const std::vector<Link>& outlinks, const size_t seq_len,
                   bool infer_shape_mode) {
  for (size_t i = 0; i < outlinks.size(); i++) {
    auto output_var = step_scopes[0]->FindVar(outlinks[i].external);
    PADDLE_ENFORCE(output_var != nullptr, "output link [%s] is not in scope.",
                   outlinks[i].external);
    LoDTensor* output = output_var->GetMutable<LoDTensor>();

    if (infer_shape_mode) {
      auto step_scope_var = step_scopes[0]->FindVar(outlinks[i].internal);
      PADDLE_ENFORCE(step_scope_var != nullptr, "%s is not in scope",
                     outlinks[i].internal);
      f::DDim step_dims =
          step_scope_var->template GetMutable<LoDTensor>()->dims();
      std::vector<int64_t> dims_vec = vectorize(step_dims);
      dims_vec.insert(dims_vec.begin(), seq_len);
      output->Resize(f::make_ddim(dims_vec));
    } else {
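      // Allocate the full [seq_len, ...] output buffer up front, then copy
      // each step's result into its slice.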
      output->mutable_data<float>(platform::CPUPlace());
      for (size_t j = 0; j < seq_len; j++) {
        LoDTensor* step_output = step_scopes[j]
                                     ->FindVar(outlinks[i].internal)
                                     ->GetMutable<LoDTensor>();
        // TODO(luotao02) data type and platform::DeviceContext() should set
        // correctly
        (output->Slice<float>(j, j + 1))
            .CopyFrom<float>(*step_output, platform::CPUPlace());
      }
    }
  }
}

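// Links the memory (state) variables across step scopes: for every memory
// attribute, the pre-state variable in scopes[step_id] is bound to the state
// variable in scopes[step_id + offset] (e.g. offset == -1 links a step to
// its predecessor). In infer-shape mode only shapes are propagated;
// otherwise the two tensors share the same underlying data.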
void LinkMemories(const std::vector<Scope*>& scopes,
                  const std::vector<rnn::MemoryAttr>& memories,
                  const size_t step_id, const int offset,
                  bool infer_shape_mode) {
  PADDLE_ENFORCE_LT(step_id, scopes.size(),
                    "step [%d] is out of range of step scopes' size [%d]",
                    step_id, scopes.size());
  PADDLE_ENFORCE_GE(static_cast<int>(step_id) + offset, 0,
                    "offset [%d] must be at least -[%d]", offset, step_id);
  PADDLE_ENFORCE_LT(
      step_id + offset, scopes.size(),
      "offset [%d] is out of range; it must be less than (%d - %d)", offset,
      scopes.size(), step_id);
  auto scope = scopes[step_id];
  auto linked_scope = scopes[step_id + offset];
  for (auto& attr : memories) {
    auto mem = scope->FindVar(attr.pre_var)->GetMutable<LoDTensor>();
    auto linked_mem = linked_scope->FindVar(attr.var)->GetMutable<LoDTensor>();
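    // During shape inference only the dims are propagated; at run time the
    // pre-state shares the linked state's buffer instead of copying it.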
    if (infer_shape_mode) {
      mem->Resize(linked_mem->dims());
    } else {
      mem->ShareDataWith<float>(*linked_mem);
    }
  }
}

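// Fills an Argument from the operator's inputs, outputs, and attributes,
// using the names given in ArgumentName: the in/out links paired with their
// internal aliases, and one MemoryAttr (var, pre_var, boot_var) per memory.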
void InitArgument(const ArgumentName& name, Argument* arg,
                  const framework::OperatorBase& op) {
  arg->step_scopes = op.Output(name.step_scopes);

  auto inlinks = op.Inputs(name.inlinks);
  auto inlink_alias = op.Attr<std::vector<std::string>>(name.inlink_alias);
  PADDLE_ENFORCE(inlinks.size() == inlink_alias.size(),
                 "the sizes of inlinks and inlink_alias don't match: %d, %d",
                 inlinks.size(), inlink_alias.size());
  for (size_t i = 0; i < inlinks.size(); ++i) {
    rnn::Link link;
    link.external = inlinks[i];
    link.internal = inlink_alias[i];
    (arg->inlinks).push_back(link);
  }

  auto outlinks = op.Outputs(name.outlinks);
  auto outlink_alias = op.Attr<std::vector<std::string>>(name.outlink_alias);
  PADDLE_ENFORCE(outlinks.size() == outlink_alias.size(),
                 "the sizes of outlinks and outlink_alias don't match: %d, %d",
                 outlinks.size(), outlink_alias.size());
  for (size_t i = 0; i < outlinks.size(); ++i) {
    rnn::Link link;
    link.external = outlinks[i];
    link.internal = outlink_alias[i];
    (arg->outlinks).push_back(link);
  }

  auto boot_memories = op.Inputs(name.boot_memories);

  // attributes
  auto memories = op.Attr<std::vector<std::string>>(name.memories);
  auto pre_memories = op.Attr<std::vector<std::string>>(name.pre_memories);

  PADDLE_ENFORCE(memories.size() == boot_memories.size(),
                 "the sizes of memories and boot_memories don't match: %d, %d",
                 memories.size(), boot_memories.size());
  PADDLE_ENFORCE(pre_memories.size() == boot_memories.size(),
                 "the sizes of pre_memories and boot_memories don't match: "
                 "%d, %d",
                 pre_memories.size(), boot_memories.size());
  PADDLE_ENFORCE(memories.size() > 0, "at least one memory should be set");

  for (size_t i = 0; i < memories.size(); ++i) {
    rnn::MemoryAttr mem_attr;
    mem_attr.var = memories[i];
    mem_attr.pre_var = pre_memories[i];
    mem_attr.boot_var = boot_memories[i];
    (arg->memories).push_back(mem_attr);
  }
}
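
// A rough sketch of how these helpers compose inside a recurrent operator's
// run loop (illustrative only; see the recurrent operator implementation for
// the real call sites):
//
//   rnn::SegmentInputs(step_scopes, arg.inlinks, seq_len, infer_shape_mode);
//   for (size_t step = 0; step < seq_len; ++step) {
//     if (step > 0) {
//       rnn::LinkMemories(step_scopes, arg.memories, step, -1,
//                         infer_shape_mode);
//     }
//     // ... run the step net in step_scopes[step] ...
//   }
//   rnn::ConcatOutputs(step_scopes, arg.outlinks, seq_len, infer_shape_mode);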

}  // namespace rnn
}  // namespace operators
}  // namespace paddle