lod_rank_table_op.cc
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/framework/op_registry.h"

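// Forward declarations of the framework and imperative types used below, so
// that this file does not need to pull in their full headers.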
namespace paddle {
namespace framework {
class InferShapeContext;
class OpDesc;
class Scope;
template <typename T>
class EmptyGradOpMaker;
}  // namespace framework
namespace imperative {
class OpBase;
}  // namespace imperative
}  // namespace paddle

namespace paddle {
namespace operators {

class LoDRankTableOp : public framework::OperatorBase {
 public:
  LoDRankTableOp(const std::string &type,
                 const framework::VariableNameMap &inputs,
                 const framework::VariableNameMap &outputs,
                 const framework::AttributeMap &attrs)
      : OperatorBase(type, inputs, outputs, attrs) {}

 private:
  void RunImpl(const framework::Scope &scope,
               const platform::Place &dev_place) const override {
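    // Fetch the input LoDTensor and the output LoDRankTable from the scope.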
    auto x = scope.FindVar(Input("X"))->Get<framework::LoDTensor>();
    auto *out =
        scope.FindVar(Output("Out"))->GetMutable<framework::LoDRankTable>();
    VLOG(10) << "Level = " << static_cast<size_t>(Attr<int>("level"));
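    // Rebuild the rank table from X's LoD at the requested level; its entries
    // are ordered by sequence length in descending order.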
    out->Reset(x.lod(), static_cast<size_t>(Attr<int>("level")));
    VLOG(10) << Input("X") << "'s lod information is " << *out;
  }
};

class LoDRankTableOpProtoMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X",
             "(LoDTensor) The input LoDTensor; it must contain LoD "
             "information.");
    AddOutput("Out", "(LoDRankTable) The rank table of the specified level.");
    AddAttr<int>("level", "(int) The LoD level to rank by.")
        .SetDefault(0)
        .EqualGreaterThan(0);
    AddComment(R"DOC(Create a LoDRankTable from a LoDTensor.

LoD Rank Table stores the specified `level` of the input's `lod`, with the
sequences ordered by length in descending order. It is useful when
implementing dynamic RNN and is shared by the dynamic RNN memory, dynamic RNN
slice input and dynamic RNN slice output operators.
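
For example, if the input X has lod = [[0, 3, 4, 6]] at level 0, the three
sequences have lengths 3, 1 and 2, and the rank table lists them as
(index 0, length 3), (index 2, length 2), (index 1, length 1).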
)DOC");
  }
};

class LoDRankTableInferShape : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext *context) const override {
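    // A rank table has no tensor shape to infer; only check that the input
    // exists.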
    PADDLE_ENFORCE_EQ(
        context->HasInput("X"), true,
        platform::errors::NotFound("LoDRankTable must have input X."));
  }
};

class LoDRankTableInferVarType : public framework::VarTypeInference {
 public:
  void operator()(framework::InferVarTypeContext *ctx) const override {
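    // Out holds a LoDRankTable rather than a tensor, so mark its variable
    // type accordingly for all elements.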
    ctx->SetOutputType("Out", framework::proto::VarType::LOD_RANK_TABLE,
                       framework::ALL_ELEMENTS);
  }
};

}  // namespace operators
}  // namespace paddle

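// This op has no gradient, so empty gradient op makers are registered for
// both the static graph (OpDesc) and imperative (OpBase) modes.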
REGISTER_OPERATOR(
    lod_rank_table, paddle::operators::LoDRankTableOp,
    paddle::operators::LoDRankTableOpProtoMaker,
    paddle::operators::LoDRankTableInferShape,
    paddle::operators::LoDRankTableInferVarType,
    paddle::framework::EmptyGradOpMaker<paddle::framework::OpDesc>,
    paddle::framework::EmptyGradOpMaker<paddle::imperative::OpBase>);