//   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <numeric>
#include <string>
#include <vector>

#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"

namespace paddle {
namespace operators {

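// Splits the flattened 'shape_concat' attribute back into one DDim per data
// item, where 'ranks' gives the number of dimensions of each item.
// e.g. shape_concat = [2,3,4,5,6] with ranks = [3,2] is restored to the
// shapes [2,3,4] and [5,6].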
static std::vector<framework::DDim> RestoreShapes(
    const std::vector<int>& shape_concat, const std::vector<int>& ranks) {
  std::vector<framework::DDim> res;
  int offset = 0;
  for (int len : ranks) {
    auto start_it = shape_concat.begin() + offset;
    auto end_it = start_it + len;
    res.push_back(framework::make_ddim(std::vector<int>(start_it, end_it)));
    offset += len;
  }
  return res;
}

// general infershape for file readers
class CreateFileReaderInferShape : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE(ctx->HasOutput("Out"),
                   "The output file reader should not be null.");
    const auto shape_concat =
        ctx->Attrs().Get<std::vector<int>>("shape_concat");
    const auto ranks = ctx->Attrs().Get<std::vector<int>>("ranks");
    std::vector<framework::DDim> shapes = RestoreShapes(shape_concat, ranks);
    ctx->SetReaderDims("Out", shapes);

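    // Additionally, record the LoD level of each generated tensor on the
    // reader's VarDesc.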
    if (ctx->IsRuntime()) {
      const auto lod_levels = ctx->Attrs().Get<std::vector<int>>("lod_levels");
      PADDLE_ENFORCE_EQ(
          lod_levels.size(), shapes.size(),
          "The number of 'lod_levels'(%d) doesn't match the number "
          "of 'shapes'(%d).",
          lod_levels.size(), shapes.size());
      framework::VarDesc* reader =
          boost::get<framework::VarDesc*>(ctx->GetOutputVarPtrs("Out")[0]);
      reader->SetLoDLevels(lod_levels);
    }
  }
};

// general infershape for decorated readers
class CreateDecoratedReaderInferShape : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("UnderlyingReader"),
                   "Input(UnderlyingReader) should not be null.");
    PADDLE_ENFORCE(ctx->HasOutput("Out"),
                   "The output decorated reader should not be null.");
    ctx->SetReaderDims("Out", ctx->GetReaderDims("UnderlyingReader"));

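    // A decorated reader inherits the LoD levels of its underlying reader.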
    if (ctx->IsRuntime()) {
      framework::VarDesc* in_reader = boost::get<framework::VarDesc*>(
          ctx->GetInputVarPtrs("UnderlyingReader")[0]);
      framework::VarDesc* out_reader =
          boost::get<framework::VarDesc*>(ctx->GetOutputVarPtrs("Out")[0]);
      out_reader->SetLoDLevels(in_reader->GetLoDLevels());
    }
  }
};

// general var type inference for file readers
class CreateFileReaderInferVarType : public framework::VarTypeInference {
 public:
  void operator()(const framework::OpDesc& op_desc,
                  framework::BlockDesc* block) const override {
    std::string reader_name = op_desc.Output("Out")[0];
    framework::VarDesc* reader = block->FindVarRecursive(reader_name);
    reader->SetType(framework::proto::VarType::READER);
  }
};

// general var type inference for decorated readers
class CreateDecoratedReaderInferVarType : public framework::VarTypeInference {
 public:
  void operator()(const framework::OpDesc& op_desc,
                  framework::BlockDesc* block) const override {
    std::string in_reader_name = op_desc.Input("UnderlyingReader")[0];
    framework::VarDesc* in_reader = block->FindVarRecursive(in_reader_name);
    std::string out_reader_name = op_desc.Output("Out")[0];
    framework::VarDesc* out_reader = block->FindVarRecursive(out_reader_name);
    out_reader->SetType(framework::proto::VarType::READER);
    out_reader->SetDataTypes(in_reader->GetDataTypes());
  }
};

template <typename T>
class CreateRandomDataGeneratorOp : public framework::OperatorBase {
 public:
  using framework::OperatorBase::OperatorBase;

 private:
  void RunImpl(const framework::Scope& scope,
               const platform::Place& dev_place) const override {
    const auto& shape_concat = Attr<std::vector<int>>("shape_concat");
    const auto& ranks = Attr<std::vector<int>>("ranks");
    PADDLE_ENFORCE(!shape_concat.empty() && !ranks.empty(),
                   "Attributes 'shape_concat' and 'ranks' can not be empty.");
    PADDLE_ENFORCE_EQ(std::accumulate(ranks.begin(), ranks.end(), 0),
                      static_cast<int>(shape_concat.size()),
                      "The sum of all ranks should be equal to the length of "
                      "'shape_concat'.");
    std::vector<framework::DDim> shapes = RestoreShapes(shape_concat, ranks);
    auto* out = scope.FindVar(Output("Out"))
                    ->template GetMutable<framework::ReaderHolder>();
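    // Fill the output ReaderHolder with a RandomDataGenerator that yields
    // uniformly distributed data within ['min', 'max'].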
    out->Reset(new framework::RandomDataGenerator<T>(shapes, Attr<float>("min"),
                                                     Attr<float>("max")));
  }
};

class CreateRandomDataGeneratorOpMaker
    : public framework::OpProtoAndCheckerMaker {
 public:
  CreateRandomDataGeneratorOpMaker(OpProto* op_proto, OpAttrChecker* op_checker)
      : OpProtoAndCheckerMaker(op_proto, op_checker) {
    AddOutput("Out", "(ReaderHolder) The created random reader.");
    AddAttr<std::vector<int>>("shape_concat",
                              "The concat of all data's shapes.");
    AddAttr<std::vector<int>>(
        "ranks",
        "The ranks of each data. "
        "e.g. shape_concat = [2,3,4,5,6] and ranks = [3,2] means the reader "
        "will generate two data items each time, whose shapes are "
        "[2,3,4] and [5,6] respectively.");
    AddAttr<std::vector<int>>("lod_levels", "The LoD levels of each data.");
    AddAttr<float>("min", "The lower bound of reader's uniform distribution.");
    AddAttr<float>("max", "The upper bound of reader's uniform distribution.");
    AddComment(R"DOC(
F
fengjiayi 已提交
148
      CreateRandomDataGenerator Operator
F
fengjiayi 已提交
149

150
      This Op creates a random reader.
F
fengjiayi 已提交
151 152 153 154 155 156
      The reader generates random data instead of really reading from files.
      Generated data follow an uniform distribution between 'min' and 'max'.
    )DOC");
  }
};

class CreateShuffleReaderOp : public framework::OperatorBase {
 public:
  using framework::OperatorBase::OperatorBase;

 private:
  void RunImpl(const framework::Scope& scope,
               const platform::Place& dev_place) const override {
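    // Decorate the underlying reader with a ShuffleReader that shuffles
    // instances within a buffer of 'buffer_size'.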
    const auto& underlying_reader = scope.FindVar(Input("UnderlyingReader"))
                                        ->Get<framework::ReaderHolder>();
    auto* out = scope.FindVar(Output("Out"))
                    ->template GetMutable<framework::ReaderHolder>();
    out->Reset(new framework::ShuffleReader(underlying_reader.Get(),
                                            Attr<int>("buffer_size")));
  }
};

class CreateShuffleReaderOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  CreateShuffleReaderOpMaker(OpProto* op_proto, OpAttrChecker* op_checker)
      : OpProtoAndCheckerMaker(op_proto, op_checker) {
    AddInput(
        "UnderlyingReader",
        "(ReaderHolder) The underlying reader for creating a shuffle reader.");
    AddOutput("Out", "(ReaderHolder) The created shuffle reader.");
    AddAttr<int>("buffer_size", "The shuffle buffer size.").GreaterThan(0);
    AddComment(R"DOC(
      CreateShuffleReader Operator

      A shuffle reader takes another reader as its 'underlying reader'
      and yields the underlying reader's outputs in a shuffled order.
    )DOC");
  }
};

class CreateBatchReaderOp : public framework::OperatorBase {
 public:
  using framework::OperatorBase::OperatorBase;

 private:
  void RunImpl(const framework::Scope& scope,
               const platform::Place& dev_place) const override {
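    // Decorate the underlying reader with a BatchReader that gathers
    // 'batch_size' instances into one batch.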
    const auto& underlying_reader = scope.FindVar(Input("UnderlyingReader"))
                                        ->Get<framework::ReaderHolder>();
    auto* out = scope.FindVar(Output("Out"))
                    ->template GetMutable<framework::ReaderHolder>();
    out->Reset(new framework::BatchReader(underlying_reader.Get(),
                                          Attr<int>("batch_size")));
  }
};

class CreateBatchReaderOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  CreateBatchReaderOpMaker(OpProto* op_proto, OpAttrChecker* op_checker)
      : OpProtoAndCheckerMaker(op_proto, op_checker) {
    AddInput(
        "UnderlyingReader",
        "(ReaderHolder) The underlying reader for creating a batch reader.");
    AddOutput("Out", "(ReaderHolder) The created batch reader.");
    AddAttr<int>("batch_size",
                 "How many instances the batch reader yields each time.")
        .GreaterThan(0);
    AddComment(R"DOC(
      CreateBatchReader Operator

      A batch reader takes another reader as its 'underlying reader',
      gathers the underlying reader's outputs and then yields them in batches.
    )DOC");
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
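// Reader-creating ops have no gradients, so all of them are registered with
// EmptyGradOpMaker.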
REGISTER_OPERATOR(create_random_data_generator,
                  ops::CreateRandomDataGeneratorOp<float>,
                  ops::CreateFileReaderInferShape,
                  ops::CreateRandomDataGeneratorOpMaker,
                  paddle::framework::EmptyGradOpMaker,
                  ops::CreateFileReaderInferVarType);
REGISTER_OPERATOR(create_shuffle_reader, ops::CreateShuffleReaderOp,
                  ops::CreateDecoratedReaderInferShape,
                  ops::CreateShuffleReaderOpMaker,
                  paddle::framework::EmptyGradOpMaker,
                  ops::CreateDecoratedReaderInferVarType);
REGISTER_OPERATOR(create_batch_reader, ops::CreateBatchReaderOp,
                  ops::CreateDecoratedReaderInferShape,
                  ops::CreateBatchReaderOpMaker,
                  paddle::framework::EmptyGradOpMaker,
                  ops::CreateDecoratedReaderInferVarType);