// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/framework/framework.pb.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"
#include "paddle/fluid/operators/detail/safe_ref.h"
#include "paddle/fluid/platform/profiler.h"

namespace paddle {
namespace operators {

// Returns true if the two dimensions are compatible.
// A dimension is compatible with the other if:
// 1. The lengths of the two dimensions are the same.
// 2. At each position where both numbers are non-negative, they are equal.
// 3. A negative number in a dimension means unknown, so it is compatible
//    with any number.
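// For example, a declared shape of [-1, 784] is compatible with a fed shape of
// [32, 784], but not with [32, 100].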
bool DimensionIsCompatibleWith(const framework::DDim& first,
                               const framework::DDim& second) {
  int dim_size = first.size();
  if (dim_size != second.size()) {
    return false;
  }
  for (int i = 0; i < dim_size; ++i) {
    if (first[i] >= 0 && second[i] >= 0 && first[i] != second[i]) {
      return false;
    }
  }
  return true;
}

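// Compile-time shape inference for the read op. When the "infer_out" attribute
// is set, the output shapes and LoD levels are copied from the metadata
// declared on the reader variable.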
class ReadInferShape : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("Reader"),
                   "The ReadOp must take a reader as input.");
    PADDLE_ENFORCE(ctx->HasOutputs("Out"),
                   "The ReadOp should be assigned with output.");
    if (!ctx->IsRuntime() && ctx->Attrs().Get<bool>("infer_out")) {
      std::vector<framework::DDim> reader_dims = ctx->GetReaderDims("Reader");
      std::vector<std::string> out_names = ctx->Outputs("Out");
      PADDLE_ENFORCE_EQ(
          reader_dims.size(), out_names.size(),
          "The reader's dim number doesn't match the output number.");
      ctx->SetOutputsDim("Out", reader_dims);
      auto in_desc =
          boost::get<framework::VarDesc*>(ctx->GetInputVarPtrs("Reader")[0]);
      auto in_lod_levels = in_desc->GetLoDLevels();
      auto out_var_ptrs = ctx->GetOutputVarPtrs("Out");
      PADDLE_ENFORCE_EQ(in_lod_levels.size(), out_var_ptrs.size(),
                        "The number of LoDLevels of Input(Reader) must match "
                        "the number of Outputs(Out).");
      for (size_t i = 0; i < out_var_ptrs.size(); ++i) {
        auto* out_desc = boost::get<framework::VarDesc*>(out_var_ptrs[i]);
        out_desc->SetLoDLevel(in_lod_levels[i]);
      }
    }
  }
};

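// Compile-time type inference for the read op. When "infer_out" is set, each
// output variable becomes a LoDTensor whose data type is taken from the
// corresponding slot of the reader.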
class ReadInferVarType : public framework::VarTypeInference {
 public:
  void operator()(framework::InferVarTypeContext* ctx) const override {
    bool infer_out = boost::get<bool>(ctx->GetAttr("infer_out"));
    if (infer_out) {
      std::string reader_name = ctx->Input("Reader")[0];
      std::vector<std::string> out_names = ctx->Output("Out");
      auto dtypes = ctx->GetDataTypes(reader_name);
      PADDLE_ENFORCE_EQ(dtypes.size(), out_names.size());
      for (size_t i = 0; i < dtypes.size(); ++i) {
        ctx->SetType(out_names[i], framework::proto::VarType::LOD_TENSOR);
        ctx->SetDataType(out_names[i], dtypes[i]);
      }
    }
  }
};

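// Runtime implementation of the read op: pulls the next batch of tensors from
// the underlying ReaderHolder and shares them into the output variables,
// optionally validating shapes and data types against the reader's metadata.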
class ReadOp : public framework::OperatorBase {
 public:
  using framework::OperatorBase::OperatorBase;

 private:
  void RunImpl(const framework::Scope& scope,
               const platform::Place& dev_place) const override {
    VLOG(3) << "read op in";
    framework::ReaderHolder* reader =
        detail::Ref(scope.FindVar(Input("Reader")),
                    "Cannot find reader variable %s", Input("Reader"))
            .GetMutable<framework::ReaderHolder>();
    std::vector<std::string> out_arg_names = Outputs("Out");
    std::vector<framework::LoDTensor> ins;

    // For profiling
    platform::RecordEvent record_event(Type());

    reader->ReadNext(&ins);
    if (ins.empty()) {
      VLOG(3) << "throw_eof_exp";
      PADDLE_THROW_EOF();
    }
    PADDLE_ENFORCE_EQ(ins.size(), out_arg_names.size(),
                      "input size and output size of read_op do not match");

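    // Fetch the shapes, data types and per-slot check flags declared by the
    // reader so that fed tensors can be validated in the loop below.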
    const std::vector<framework::DDim>& shapes = reader->Shapes();
    const std::vector<framework::proto::VarType::Type>& var_types =
        reader->VarTypes();
    const std::vector<bool>& need_check_feed = reader->NeedCheckFeed();
    PADDLE_ENFORCE_EQ(out_arg_names.size(), need_check_feed.size(),
                      "output size of read_op and the number of feeded "
                      "variables of reader do not match");

    for (size_t i = 0; i < out_arg_names.size(); ++i) {
      auto* out =
          scope.FindVar(out_arg_names[i])->GetMutable<framework::LoDTensor>();
      if (need_check_feed[i]) {
        auto in_dims = ins[i].dims();
        PADDLE_ENFORCE_EQ(DimensionIsCompatibleWith(shapes[i], in_dims), true,
                          "The feeded Variable %s should have dimensions = %d, "
                          "shape = [%s], but received feeded shape [%s]",
                          out_arg_names[i], shapes[i].size(), shapes[i],
                          in_dims);
        PADDLE_ENFORCE_EQ(
            ins[i].type(), var_types[i],
            "The data type of feeded Variable %s must be %s, but received %s",
            out_arg_names[i], var_types[i], ins[i].type());
      }
      out->ShareDataWith(ins[i]);
      out->set_lod(ins[i].lod());
    }
  }
};

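// Declares the inputs, outputs and attributes of the read op.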
class ReadOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("Reader", "(ReaderHolder) The executed reader.");
    AddOutput("Out", "(LoDTensor) The output data.").AsDuplicable();
    AddAttr<bool>(
        "throw_eof_exp",
        "If set true, an exception will be thrown when the Reader "
        "yields empty (which means there is no next data).\n"
        "NOTES: This flag must be true always. It will be set to false"
        " only when the data-balance is enabled in ParallelExecutor"
        " and it is set by ParallelExecutor instance, not users.")
        .SetDefault(true);
    AddAttr<bool>("infer_out", "").SetDefault(true);
    AddComment(R"DOC(
      Read Operator

      Execute a given reader once and output data.
    )DOC");
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
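// The read op has no gradient, so it is registered with EmptyGradOpMaker.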
REGISTER_OPERATOR(read, ops::ReadOp, ops::ReadInferShape, ops::ReadOpMaker,
                  paddle::framework::EmptyGradOpMaker, ops::ReadInferVarType);