/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/operator.h"

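// rnn_memory_helper / rnn_memory_helper_grad are small glue operators used by
// the static RNN machinery to expose a step-scope "memory" variable to the
// enclosing program. Both are essentially tensor copies; the only extra logic
// is the zero-gradient fallback in the grad op's RunImpl below.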
namespace paddle {
namespace operators {
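// Forward helper op. It derives from OperatorBase rather than
// OperatorWithKernel, so there is no kernel dispatch; RunImpl below does the
// work directly on the place passed in by the executor.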
class RNNMemoryHelperOp : public framework::OperatorBase {
 public:
  RNNMemoryHelperOp(const std::string &type,
                    const framework::VariableNameMap &inputs,
                    const framework::VariableNameMap &outputs,
                    const framework::AttributeMap &attrs)
      : OperatorBase(type, inputs, outputs, attrs) {}

 private:
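  // Looks up X and Out in the scope, copies X's tensor to Out on dev_place,
  // and shares X's LoD so downstream ops see the same sequence information.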
  void RunImpl(const framework::Scope &scope,
               const platform::Place &dev_place) const override {
    auto mem_var_name = Input("X");
    auto *mem_var = scope.FindVar(mem_var_name);
    PADDLE_ENFORCE(mem_var != nullptr,
                   "Cannot find mem_var in scope, mem_var_name is %s",
                   mem_var_name);

    auto out_name = this->Output("Out");
    auto *out_var = scope.FindVar(out_name);
    PADDLE_ENFORCE(out_var != nullptr,
                   "Cannot find out_var in scope, out_var_name is %s",
                   out_name);

    platform::DeviceContextPool &pool = platform::DeviceContextPool::Instance();
    auto &dev_ctx = *pool.Get(dev_place);

    auto *out_tensor = out_var->GetMutable<framework::LoDTensor>();
    auto &mem_tensor = mem_var->Get<framework::LoDTensor>();
    framework::TensorCopy(mem_tensor, dev_place, dev_ctx, out_tensor);
    out_tensor->set_lod(mem_tensor.lod());
  }
};

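// Compile-time shape inference for the forward op: Out simply takes over both
// the dimensions and the LoD level of X.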
class RNNMemoryHelperOpShapeInference : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext *ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("X"),
                   "Input(X) of rnn_memory_helper op should not be null.");
    PADDLE_ENFORCE(ctx->HasOutput("Out"),
                   "Output of rnn_memory_helper op should not be null.");
    ctx->ShareDim("X", /*->*/ "Out");
    ctx->ShareLoD("X", /*->*/ "Out");
  }
};

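// Proto maker for the forward op. Note that the dtype attribute is only
// declarative here: RunImpl copies whatever type X actually holds.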
class RNNMemoryHelperOpInfoMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "(LoDTensor) the RNN memory variable to be copied.");
    AddOutput("Out", "(LoDTensor) a copy of Input(X) that shares its LoD.");
    AddAttr<int>("dtype",
                 "(int, default 5 (FP32)) "
                 "Output data type")
        .SetDefault(framework::proto::VarType::FP32);
    AddComment(R"DOC(
rnn_memory_helper is an internal helper op for the static RNN: it copies the
memory variable Input(X) into Output(Out) and shares X's LoD with Out.
)DOC");
  }
};

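// Backward helper: routes Input(Out@GRAD) into Output(X@GRAD). When no
// gradient reaches Out, X@GRAD is filled with zeros instead (see the
// fill_constant branch in RunImpl).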
class RNNMemoryHelperGradOp : public framework::OperatorBase {
 public:
  RNNMemoryHelperGradOp(const std::string &type,
                        const framework::VariableNameMap &inputs,
                        const framework::VariableNameMap &outputs,
                        const framework::AttributeMap &attrs)
      : OperatorBase(type, inputs, outputs, attrs) {}

 private:
  void RunImpl(const framework::Scope &scope,
               const platform::Place &dev_place) const override {
    auto out_grad_var_name = Input(framework::GradVarName("Out"));
    auto *out_grad_var = scope.FindVar(out_grad_var_name);

    auto in_grad_var_name = Output(framework::GradVarName("X"));
    auto *in_grad_var = scope.FindVar(in_grad_var_name);

    PADDLE_ENFORCE(in_grad_var != nullptr,
                   "Cannot find in_grad_var in scope, name is %s",
                   in_grad_var_name);

    platform::DeviceContextPool &pool = platform::DeviceContextPool::Instance();
    auto &dev_ctx = *pool.Get(dev_place);

    if (out_grad_var == nullptr) {
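      // No gradient flowed into Out (e.g. the memory is not consumed after
      // the RNN), so synthesize a zero gradient with the shape and dtype of
      // the forward input X by running a fill_constant op in the same scope.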
      VLOG(5) << "Using fill constant 0 as starting gradient";
      auto in_var_name = Input("X");
      auto *in_var = scope.FindVar(in_var_name);
      auto &in_var_tensor = in_var->Get<framework::LoDTensor>();

      framework::AttributeMap attrs;
      attrs["dtype"] = in_var_tensor.type();
      attrs["shape"] = framework::vectorize2int(in_var_tensor.dims());
      attrs["value"] = 0.0f;

      auto zero_op = framework::OpRegistry::CreateOp(
          "fill_constant", {}, {{"Out", {in_grad_var_name}}}, attrs);
      zero_op->Run(scope, dev_place);
    } else {
      auto &out_grad_tensor = out_grad_var->Get<framework::LoDTensor>();
      auto *in_grad_tensor = in_grad_var->GetMutable<framework::LoDTensor>();
      framework::TensorCopy(out_grad_tensor, dev_place, dev_ctx,
                            in_grad_tensor);
      in_grad_tensor->set_lod(out_grad_tensor.lod());
    }
  }
};

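// Proto maker for the grad op. Out@GRAD is declared like a normal input, but
// RunImpl above tolerates its absence and falls back to the zero-fill path.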
class RNNMemoryHelperGradOpInfoMaker
    : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput(framework::GradVarName("Out"),
             "(LoDTensor) the gradient of Out; may be absent, in which case "
             "the gradient of X is zero-filled.");
    AddInput("X", "(LoDTensor) the forward input, providing the shape and "
                  "dtype for the zero-fill fallback.");
    AddInput("Out", "(LoDTensor) the forward output.");
    AddOutput(framework::GradVarName("X"), "(LoDTensor) the gradient of X.");
    AddAttr<int>("dtype",
                 "(int, default 5 (FP32)) "
                 "Output data type")
        .SetDefault(framework::proto::VarType::FP32);
    AddComment(R"DOC(
rnn_memory_helper_grad copies Input(Out@GRAD) into Output(X@GRAD). When
Out@GRAD is not provided, X@GRAD is filled with zeros of the same shape and
dtype as Input(X).
)DOC");
  }
};

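// Shape inference for the grad op: X@GRAD mirrors the shape and LoD of the
// forward input X rather than of Out@GRAD, which may be absent at runtime.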
class RNNMemoryHelperGradOpShapeInference : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext *ctx) const override {
    auto x_grad_name = framework::GradVarName("X");
    PADDLE_ENFORCE(ctx->HasOutput(x_grad_name),
                   "Gradient of Input(X) in rnn_memory_helper_grad should "
                   "not be null.");
    PADDLE_ENFORCE(ctx->HasInput("X"),
                   "Input(X) of rnn_memory_helper_grad should not be null.");
    ctx->SetOutputDim(x_grad_name, ctx->GetInputDim("X"));
    ctx->ShareLoD("X", /*->*/ x_grad_name);
  }
};

}  // namespace operators
}  // namespace paddle

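// Registration. DefaultGradOpDescMaker<true> derives the
// rnn_memory_helper_grad call from the forward op's inputs and outputs,
// matching the input/output lists declared by RNNMemoryHelperGradOpInfoMaker.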
REGISTER_OPERATOR(rnn_memory_helper, paddle::operators::RNNMemoryHelperOp,
                  paddle::operators::RNNMemoryHelperOpInfoMaker,
                  paddle::operators::RNNMemoryHelperOpShapeInference,
                  paddle::framework::DefaultGradOpDescMaker<true>);
REGISTER_OPERATOR(rnn_memory_helper_grad,
                  paddle::operators::RNNMemoryHelperGradOp,
                  paddle::operators::RNNMemoryHelperGradOpInfoMaker,
                  paddle::operators::RNNMemoryHelperGradOpShapeInference);