/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/lookup_table_op.h"

#include <memory>

#include "paddle/fluid/framework/no_need_buffer_vars_inference.h"
#include "paddle/fluid/framework/var_type_inference.h"

namespace paddle {
namespace operators {

class LookupTableOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE_EQ(ctx->HasInput("W"), true,
                      "Input(W) of LookupTableOp should not be null.");
    PADDLE_ENFORCE_EQ(ctx->HasInput("Ids"), true,
                      "Input(Ids) of LookupTableOp should not be null.");
    PADDLE_ENFORCE_EQ(ctx->HasOutput("Out"), true,
                      "Output(Out) of LookupTableOp should not be null.");

    auto table_dims = ctx->GetInputDim("W");
    auto ids_dims = ctx->GetInputDim("Ids");
    int ids_rank = ids_dims.size();
    VLOG(5) << "ids rank is " << ids_rank << std::endl;
    PADDLE_ENFORCE_EQ(
        table_dims.size(), 2,
        "ShapeError: The dimensions of the 'lookup table' must be 2. "
        "But received lookup table's dimensions = %d, "
        "lookup table's shape = [%s].",
        table_dims.size(), table_dims);
    PADDLE_ENFORCE_EQ(
        ids_dims[ids_rank - 1], 1,
        "ShapeError: The last dimension of the 'Ids' tensor must be 1. "
        "But received Ids's last dimension = %d, Ids's shape = [%s].",
        ids_dims[ids_rank - 1], ids_dims);

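    // The output keeps the leading dimensions of Ids (dropping the trailing 1)
    // and appends the embedding width, e.g. Ids [batch, seq_len, 1] with
    // W [vocab_size, emb_dim] gives Out [batch, seq_len, emb_dim].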
    auto output_dims =
        framework::vectorize(framework::slice_ddim(ids_dims, 0, ids_rank - 1));
    output_dims.push_back(table_dims[1]);
    ctx->SetOutputDim("Out", framework::make_ddim(output_dims));

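    // Only a LoDTensor output can carry LoD, so the LoD of Ids is shared
    // with Out only in that case.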
    if (ctx->GetOutputsVarType("Out")[0] ==
        framework::proto::VarType::LOD_TENSOR) {
      ctx->ShareLoD("Ids", /*->*/ "Out");
    }
  }

 protected:
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
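    // The kernel data type is taken from the embedding table W, not from the
    // integer Ids input.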
    auto data_type = OperatorWithKernel::IndicateVarDataType(ctx, "W");
    return framework::OpKernelType(data_type, ctx.device_context());
  }
};

class LookupTableOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("W",
             "(Tensor) The input represents embedding tensors, "
             "which is a learnable parameter.");
    AddInput("Ids",
             "An input of type int64, which "
             "contains the ids to be looked up in W. "
             "The last dimension size must be 1.");
    AddOutput("Out", "The lookup results, which have the same type as W.");
    AddAttr<bool>("is_sparse",
                  "(boolean, default false) "
                  "Sparse update.")
        .SetDefault(false);
    AddAttr<bool>("is_distributed",
                  "(boolean, default false) distributed lookup table.")
        .SetDefault(false);
    AddAttr<int64_t>("padding_idx",
                     "(int64, default -1) "
                     "If the value is -1, it has no effect on the lookup. "
                     "Otherwise the given value indicates padding the output "
                     "with zeros whenever lookup encounters it in Ids.")
        .SetDefault(kNoPadding);
    // NOTE(minqiyang): grad_inplace is a temporary attribute,
    // please do NOT set this attribute in the Python layer.
    AddAttr<bool>("grad_inplace",
                  "(boolean, default false) "
                  "If the grad op reuses the input's variable.")
        .SetDefault(false);

    // for parameter prefetch
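    // (the attributes below configure how rows of a distributed lookup table
    // are prefetched from parameter servers)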
    AddAttr<bool>("remote_prefetch", "").SetDefault(false);
    AddAttr<int>("trainer_id", "trainer id from 0 ~ worker_num.").SetDefault(0);
    AddAttr<std::vector<int64_t>>("height_sections",
                                  "Height for each output SelectedRows.")
        .SetDefault(std::vector<int64_t>({}));
    AddAttr<std::vector<std::string>>(
        "epmap",
        "(string vector, default 127.0.0.1:6164) "
        "Server endpoints in the order of input variables for mapping.")
        .SetDefault({});
    AddAttr<std::vector<std::string>>(
        "table_names",
        "(string vector, the split table names that will be fetched from "
        "the parameter server, "
        "in the order of input variables for mapping)")
        .SetDefault({});

    AddComment(R"DOC(
Lookup Table Operator.

This operator is used to perform lookups on the parameter W;
the looked-up rows are then concatenated into a dense tensor.
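For example, if W has shape [vocab_size, emb_dim] and Ids has shape
[batch_size, seq_len, 1], then Out has shape [batch_size, seq_len, emb_dim].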

The input Ids can optionally carry LoD (Level of Details) information.
The output only shares the LoD information with the input Ids.

)DOC");
  }
};

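// Only the shape of W, not its data, is needed in the backward pass, so W is
// marked as a no-need-buffer variable for the grad op.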
DECLARE_NO_NEED_BUFFER_VARS_INFERER(LookupTableGradOpNoBuffer, "W");

template <typename T>
class LookupTableGradOpMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  void Apply(GradOpPtr<T> op) const override {
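    // The grad op takes the forward inputs W and Ids plus the gradient of
    // Out, and produces only the gradient of W (Ids is an integer tensor and
    // has no gradient).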
    op->SetType("lookup_table_grad");

    op->SetInput("W", this->Input("W"));
    op->SetInput("Ids", this->Input("Ids"));
    op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));

    op->SetOutput(framework::GradVarName("W"), this->InputGrad("W"));

    op->SetAttrMap(this->Attrs());
  }
};

class LookupTableOpGrad : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
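    // The gradient of W is given the same shape as the table W itself.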
    auto table_dims = ctx->GetInputDim("W");
    ctx->SetOutputDim(framework::GradVarName("W"), table_dims);
  }

 protected:
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
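    // The grad kernel data type follows the incoming gradient of Out.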
    auto data_type = OperatorWithKernel::IndicateVarDataType(
        ctx, framework::GradVarName("Out"));
    return framework::OpKernelType(data_type, ctx.device_context());
  }
};

class LookupTableOpGradVarTypeInference : public framework::VarTypeInference {
 public:
  void operator()(framework::InferVarTypeContext* ctx) const override {
    auto out_var_name = ctx->Output(framework::GradVarName("W")).front();
    auto attr = ctx->GetAttr("is_sparse");
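    // When is_sparse is set, the gradient of W is emitted as SelectedRows;
    // otherwise it is a dense LoDTensor.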
    bool is_sparse = boost::get<bool>(attr);
    if (is_sparse) {
      VLOG(3) << "lookup_table_grad op " << framework::GradVarName("W")
              << " is set to SelectedRows";
      ctx->SetType(out_var_name, framework::proto::VarType::SELECTED_ROWS);
    } else {
      VLOG(3) << "lookup_table_grad op " << framework::GradVarName("W")
              << " is set to LoDTensor";
      ctx->SetType(out_var_name, framework::proto::VarType::LOD_TENSOR);
    }
    ctx->SetDataType(out_var_name, ctx->GetDataType(ctx->Input("W")[0]));
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
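// Register the forward op together with grad op makers for both the static
// graph (OpDesc) and imperative (OpBase) modes.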
REGISTER_OPERATOR(lookup_table, ops::LookupTableOp, ops::LookupTableOpMaker,
                  ops::LookupTableGradOpMaker<paddle::framework::OpDesc>,
                  ops::LookupTableGradOpMaker<paddle::imperative::OpBase>);

REGISTER_OPERATOR(lookup_table_grad, ops::LookupTableOpGrad,
                  ops::LookupTableGradOpNoBuffer,
                  ops::LookupTableOpGradVarTypeInference);

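// CPU kernels: the forward lookup is instantiated for float, double and
// int8_t tables; the grad kernel only for float and double.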
REGISTER_OP_CPU_KERNEL(lookup_table, ops::LookupTableKernel<float>,
                       ops::LookupTableKernel<double>,
                       ops::LookupTableKernel<int8_t>);
REGISTER_OP_CPU_KERNEL(lookup_table_grad, ops::LookupTableGradKernel<float>,
                       ops::LookupTableGradKernel<double>);