create_reader_op.cc

//   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"

namespace paddle {
namespace operators {

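// Splits the flattened 'shape_concat' attribute back into one DDim per data
// item; 'ranks[i]' gives the rank (number of dimensions) of the i-th item.
// For example, shape_concat = [2,3,4,5,6] with ranks = [3,2] is restored to
// the two shapes [2,3,4] and [5,6].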
static std::vector<framework::DDim> RestoreShapes(
    const std::vector<int>& shape_concat, const std::vector<int>& ranks) {
  std::vector<framework::DDim> res;
  int offset = 0;
  for (int len : ranks) {
    auto start_it = shape_concat.begin() + offset;
    auto end_it = start_it + len;
    res.push_back(framework::make_ddim(std::vector<int>(start_it, end_it)));
    offset += len;
  }
  return res;
}

// general infershape for file readers
class CreateFileReaderInferShape : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE(ctx->HasOutput("Out"),
                   "The output file reader should not be null.");
    const auto shape_concat =
        ctx->Attrs().Get<std::vector<int>>("shape_concat");
    const auto ranks = ctx->Attrs().Get<std::vector<int>>("ranks");
    std::vector<framework::DDim> shapes = RestoreShapes(shape_concat, ranks);
    ctx->SetReaderDims("Out", shapes);

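    // At runtime, additionally propagate the 'lod_levels' attribute to the
    // output reader's VarDesc, one LoD level per generated data item.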
    if (ctx->IsRuntime()) {
      const auto lod_levels = ctx->Attrs().Get<std::vector<int>>("lod_levels");
      PADDLE_ENFORCE_EQ(
          lod_levels.size(), shapes.size(),
          "The number of 'lod_levels'(%d) doesn't match the number "
          "of 'shapes'(%d).",
          lod_levels.size(), shapes.size());
      framework::VarDesc* reader =
          boost::get<framework::VarDesc*>(ctx->GetOutputVarPtrs("Out")[0]);
      reader->SetLoDLevels(lod_levels);
    }
  }
};

// general infershape for decorated readers
class CreateDecoratedReaderInferShape : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("UnderlyingReader"),
                   "Input(UnderlyingReader) should not be null.");
    PADDLE_ENFORCE(ctx->HasOutput("Out"),
                   "The output decorated reader should not be null.");
    ctx->SetReaderDims("Out", ctx->GetReaderDims("UnderlyingReader"));

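    // At runtime, the decorated reader inherits the LoD levels of its
    // underlying reader.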
    if (ctx->IsRuntime()) {
      framework::VarDesc* in_reader = boost::get<framework::VarDesc*>(
          ctx->GetInputVarPtrs("UnderlyingReader")[0]);
      framework::VarDesc* out_reader =
          boost::get<framework::VarDesc*>(ctx->GetOutputVarPtrs("Out")[0]);
      out_reader->SetLoDLevels(in_reader->GetLoDLevels());
    }
  }
};

// general var type inference for file readers
class CreateFileReaderInferVarType : public framework::VarTypeInference {
 public:
  void operator()(const framework::OpDesc& op_desc,
                  framework::BlockDesc* block) const override {
    std::string reader_name = op_desc.Output("Out")[0];
    framework::VarDesc* reader = block->FindVarRecursive(reader_name);
    reader->SetType(framework::proto::VarDesc::READER);
  }
};

// general var type inference for decorated readers
class CreateDecoratedReaderInferVarType : public framework::VarTypeInference {
 public:
  void operator()(const framework::OpDesc& op_desc,
                  framework::BlockDesc* block) const override {
    std::string in_reader_name = op_desc.Input("UnderlyingReader")[0];
    framework::VarDesc* in_reader = block->FindVarRecursive(in_reader_name);
    std::string out_reader_name = op_desc.Output("Out")[0];
    framework::VarDesc* out_reader = block->FindVarRecursive(out_reader_name);
    out_reader->SetType(framework::proto::VarDesc::READER);
    out_reader->SetDataTypes(in_reader->GetDataTypes());
  }
};

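// File reader op: creates a RandomDataGenerator that yields tensors of the
// given shapes, filled with values drawn uniformly from ['min', 'max'],
// instead of reading them from files.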
template <typename T>
class CreateRandomDataGeneratorOp : public framework::OperatorBase {
 public:
  using framework::OperatorBase::OperatorBase;
  void Run(const framework::Scope& scope,
           const platform::Place& dev_place) const override {
    const auto& shape_concat = Attr<std::vector<int>>("shape_concat");
    const auto& ranks = Attr<std::vector<int>>("ranks");
    PADDLE_ENFORCE(!shape_concat.empty() && !ranks.empty(),
                   "Attributes 'shape_concat' and 'ranks' must not be empty.");
    PADDLE_ENFORCE_EQ(std::accumulate(ranks.begin(), ranks.end(), 0),
                      static_cast<int>(shape_concat.size()),
                      "The sum of all ranks should be equal to the length of "
                      "'shape_concat'.");
    std::vector<framework::DDim> shapes = RestoreShapes(shape_concat, ranks);
    auto* out = scope.FindVar(Output("Out"))
                    ->template GetMutable<framework::ReaderHolder>();
    out->Reset(new framework::RandomDataGenerator<T>(shapes, Attr<float>("min"),
                                                     Attr<float>("max")));
  }
};

class CreateRandomDataGeneratorOpMaker
    : public framework::OpProtoAndCheckerMaker {
 public:
  CreateRandomDataGeneratorOpMaker(OpProto* op_proto, OpAttrChecker* op_checker)
      : OpProtoAndCheckerMaker(op_proto, op_checker) {
    AddOutput("Out", "(ReaderHolder) The created random reader.");
    AddAttr<std::vector<int>>("shape_concat",
                              "The concat of all data's shapes.");
    AddAttr<std::vector<int>>(
        "ranks",
        "The ranks of each data."
        "e.g."
        "shape_concat = [2,3,4,5,6]"
        "ranks = [3,2]"
        "It means the reader will generate two data each time,"
        "whose shapes are [2,3,4] and [5,6] respectively.");
    AddAttr<std::vector<int>>("lod_levels", "The LoD levels of each data.");
    AddAttr<float>("min", "The lower bound of reader's uniform distribution.");
    AddAttr<float>("max", "The upper bound of reader's uniform distribution.");
    AddComment(R"DOC(
      CreateRandomDataGenerator Operator

      This Op creates a random reader.
      The reader generates random data instead of actually reading from files.
      The generated data follow a uniform distribution between 'min' and 'max'.
    )DOC");
  }
};

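// Decorated reader op: fetches the underlying reader from the scope and wraps
// it in a ShuffleReader with a shuffle buffer of 'buffer_size' instances, so
// that the underlying reader's outputs are yielded in a shuffled order.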
class CreateShuffleReaderOp : public framework::OperatorBase {
 public:
  using framework::OperatorBase::OperatorBase;
  void Run(const framework::Scope& scope,
           const platform::Place& dev_place) const override {
    const auto& underlying_reader = scope.FindVar(Input("UnderlyingReader"))
                                        ->Get<framework::ReaderHolder>();
    auto* out = scope.FindVar(Output("Out"))
                    ->template GetMutable<framework::ReaderHolder>();
    out->Reset(new framework::ShuffleReader(underlying_reader.Get(),
                                            Attr<int>("buffer_size")));
  }
};

class CreateShuffleReaderOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  CreateShuffleReaderOpMaker(OpProto* op_proto, OpAttrChecker* op_checker)
      : OpProtoAndCheckerMaker(op_proto, op_checker) {
    AddInput(
        "UnderlyingReader",
        "(ReaderHolder) The underlying reader for creating a shuffle reader.");
    AddOutput("Out", "(ReaderHolder) The created shuffle reader.");
    AddAttr<int>("buffer_size", "The shuffle buffer size.").GreaterThan(0);
    AddComment(R"DOC(
      CreateShuffleReader Operator

      A shuffle reader takes another reader as its 'underlying reader'
      and yields the underlying reader's outputs in a shuffled order. 
    )DOC");
  }
};

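// Decorated reader op: wraps the underlying reader in a BatchReader, which
// gathers 'batch_size' instances from the underlying reader and yields them
// as one batch.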
class CreateBatchReaderOp : public framework::OperatorBase {
 public:
  using framework::OperatorBase::OperatorBase;
  void Run(const framework::Scope& scope,
           const platform::Place& dev_place) const override {
    const auto& underlying_reader = scope.FindVar(Input("UnderlyingReader"))
                                        ->Get<framework::ReaderHolder>();
    auto* out = scope.FindVar(Output("Out"))
                    ->template GetMutable<framework::ReaderHolder>();
    out->Reset(new framework::BatchReader(underlying_reader.Get(),
                                          Attr<int>("batch_size")));
  }
};

class CreateBatchReaderOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  CreateBatchReaderOpMaker(OpProto* op_proto, OpAttrChecker* op_checker)
      : OpProtoAndCheckerMaker(op_proto, op_checker) {
    AddInput(
        "UnderlyingReader",
        "(ReaderHolder) The underlying reader for creating a batch reader.");
    AddOutput("Out", "(ReaderHolder) The created batch reader.");
    AddAttr<int>("batch_size",
                 "How many instances the batch reader yields each time.")
        .GreaterThan(0);
    AddComment(R"DOC(
      CreateBatchReader Operator

      A batch reader takes another reader as its 'underlying reader', 
      gathers the underlying reader's outputs and then yields them in batches. 
    )DOC");
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
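// Readers have no gradient, so every reader op is registered with
// EmptyGradOpMaker. The file reader op (create_random_data_generator) uses
// the file-reader InferShape/InferVarType classes, while the decorated reader
// ops (create_shuffle_reader, create_batch_reader) use the decorated-reader
// ones, which forward dims, LoD levels and data types from the underlying
// reader.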
REGISTER_OPERATOR(create_random_data_generator,
                  ops::CreateRandomDataGeneratorOp<float>,
                  ops::CreateFileReaderInferShape,
                  ops::CreateRandomDataGeneratorOpMaker,
                  paddle::framework::EmptyGradOpMaker,
                  ops::CreateFileReaderInferVarType);
REGISTER_OPERATOR(create_shuffle_reader, ops::CreateShuffleReaderOp,
                  ops::CreateDecoratedReaderInferShape,
                  ops::CreateShuffleReaderOpMaker,
                  paddle::framework::EmptyGradOpMaker,
                  ops::CreateDecoratedReaderInferVarType);
REGISTER_OPERATOR(create_batch_reader, ops::CreateBatchReaderOp,
                  ops::CreateDecoratedReaderInferShape,
                  ops::CreateBatchReaderOpMaker,
                  paddle::framework::EmptyGradOpMaker,
                  ops::CreateDecoratedReaderInferVarType);
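
// A typical composition (a sketch, not enforced by this file): the reader
// created by create_random_data_generator can serve as the 'UnderlyingReader'
// of create_shuffle_reader, whose output can in turn feed create_batch_reader,
// yielding a random -> shuffle -> batch reader chain.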