// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/framework/framework.pb.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"
#include "paddle/fluid/platform/profiler.h"

namespace paddle {
namespace operators {

// Returns true if the two dimensions are compatible.
// A dimension is compatible with the other if:
// 1. The lengths of the two dimensions are the same.
// 2. Each non-negative number in the two dimensions is the same.
// 3. A negative number in a dimension means the size is unknown, so it is
//    compatible with any number.
bool DimensionIsCompatibleWith(const framework::DDim& first,
                               const framework::DDim& second) {
  int dim_size = first.size();
  if (dim_size != second.size()) {
    return false;
  }
  for (int i = 0; i < dim_size; ++i) {
    if (first[i] >= 0 && second[i] >= 0 && first[i] != second[i]) {
      return false;
    }
  }
  return true;
}

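// Compile-time shape inference for the read operator. When "infer_out" is
// set, the dims and LoD levels recorded on the reader variable are copied
// to the output variables.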
class ReadInferShape : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext* ctx) const override {
    OP_INOUT_CHECK(ctx->HasInput("Reader"), "Input", "Reader", "read");
    OP_INOUT_CHECK(ctx->HasOutputs("Out"), "Output", "Out", "read");
    if (!ctx->IsRuntime() && ctx->Attrs().Get<bool>("infer_out")) {
      std::vector<framework::DDim> reader_dims = ctx->GetReaderDims("Reader");
      std::vector<std::string> out_names = ctx->Outputs("Out");
      PADDLE_ENFORCE_EQ(
          reader_dims.size(), out_names.size(),
          platform::errors::InvalidArgument(
              "The reader's dim number doesn't match the output number."));
      ctx->SetOutputsDim("Out", reader_dims);
      auto in_desc =
          boost::get<framework::VarDesc*>(ctx->GetInputVarPtrs("Reader")[0]);
      auto in_lod_levels = in_desc->GetLoDLevels();
      auto out_var_ptrs = ctx->GetOutputVarPtrs("Out");
      PADDLE_ENFORCE_EQ(
          in_lod_levels.size(), out_var_ptrs.size(),
          platform::errors::InvalidArgument(
              "LoDLevels of Input(Reader) must be the same as the "
              "number of Outputs(Out)."));
      for (size_t i = 0; i < out_var_ptrs.size(); ++i) {
        auto* out_desc = boost::get<framework::VarDesc*>(out_var_ptrs[i]);
        out_desc->SetLoDLevel(in_lod_levels[i]);
      }
    }
  }
};

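// Static-graph var type inference. When "infer_out" is set, the reader's
// element dtypes are propagated to the outputs and each output is marked as
// a LoDTensor.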
class ReadInferVarType : public framework::StaticGraphVarTypeInference {
 public:
  void operator()(framework::InferVarTypeContext* ctx) const override {
    bool infer_out = boost::get<bool>(ctx->GetAttr("infer_out"));
    if (infer_out) {
      std::string reader_name = Input(ctx, "Reader")[0];
      auto& out_names = Output(ctx, "Out");
      auto dtypes = GetDataTypes(ctx, reader_name);
      PADDLE_ENFORCE_EQ(dtypes.size(), out_names.size(),
                        platform::errors::InvalidArgument(
                            "The dtype number of the reader does not match "
                            "the number of Outputs(Out) of read_op."));
      for (size_t i = 0; i < dtypes.size(); ++i) {
        SetType(ctx, out_names[i], framework::proto::VarType::LOD_TENSOR);
        SetDataType(ctx, out_names[i], dtypes[i]);
      }
    }
  }
};

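// Runtime implementation of the read operator: reads the next batch from the
// underlying ReaderHolder and shares it with the output variables.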
class ReadOp : public framework::OperatorBase {
 public:
  using framework::OperatorBase::OperatorBase;

 private:
  void RunImpl(const framework::Scope& scope,
               const platform::Place& dev_place) const override {
    VLOG(3) << "read op in";
    framework::ReaderHolder* reader =
        GET_DATA_SAFELY(scope.FindVar(Input("Reader")), "Input", "Reader",
                        "Read")
            .GetMutable<framework::ReaderHolder>();
    std::vector<std::string> out_arg_names = Outputs("Out");
    std::vector<framework::LoDTensor> ins;

    // For profiling
    platform::RecordEvent record_event(Type());

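    // Read the next batch; an empty result means the reader is exhausted and
    // an EOF exception is raised.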
    reader->ReadNext(&ins);
    if (ins.empty()) {
      VLOG(3) << "throw_eof_exp";
      PADDLE_THROW_EOF();
    }
    PADDLE_ENFORCE_EQ(
        ins.size(), out_arg_names.size(),
        platform::errors::InvalidArgument(
            "The input data number and the output data number of read_op do "
            "not match."));

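    // Shapes, dtypes and feed-check flags recorded on the reader; they are
    // used below to validate the fed tensors.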
    const std::vector<framework::DDim>& shapes = reader->Shapes();
    const std::vector<framework::proto::VarType::Type>& var_types =
        reader->VarTypes();
    const std::vector<bool>& need_check_feed = reader->NeedCheckFeed();
    PADDLE_ENFORCE_EQ(out_arg_names.size(), need_check_feed.size(),
                      platform::errors::InvalidArgument(
                          "The output size of read_op and the number of fed "
                          "variables of the reader do not match."));

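    // For outputs that require feed checking, verify that each fed tensor
    // matches the shape and dtype declared on the reader before sharing its
    // buffer with the output variable.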
    for (size_t i = 0; i < out_arg_names.size(); ++i) {
      auto* out =
          scope.FindVar(out_arg_names[i])->GetMutable<framework::LoDTensor>();
      if (need_check_feed[i]) {
        auto in_dims = ins[i].dims();
        PADDLE_ENFORCE_EQ(
            DimensionIsCompatibleWith(shapes[i], in_dims), true,
            platform::errors::InvalidArgument(
                "The fed Variable %s should have dimensions = %d, "
                "shape = [%s], but received fed shape [%s]",
                out_arg_names[i], shapes[i].size(), shapes[i], in_dims));
        PADDLE_ENFORCE_EQ(
            ins[i].type(), var_types[i],
            platform::errors::InvalidArgument(
                "The data type of fed Variable %s must be %s, but received %s",
                out_arg_names[i], var_types[i], ins[i].type()));
      }
      out->ShareDataWith(ins[i]);
      out->set_lod(ins[i].lod());
    }
  }
};

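// Declares the reader input, the duplicable data outputs and the attributes
// controlling EOF behaviour, compile-time inference and last-batch dropping.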
class ReadOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("Reader", "(ReaderHolder) The executed reader.");
    AddOutput("Out", "(LoDTensor) The output data.").AsDuplicable();
    AddAttr<bool>(
        "throw_eof_exp",
        "If set true, an exception will be thrown when the Reader "
        "yields an empty batch (which means there is no next data).\n"
        "NOTES: This flag must always be true. It is set to false "
        "only when data balancing is enabled in ParallelExecutor, "
        "and it is set by the ParallelExecutor instance, not by users.")
        .SetDefault(true);
    AddAttr<bool>("infer_out", "").SetDefault(true);
    AddAttr<bool>("drop_last",
                  "Whether to drop last batches whose number is less than "
                  "actual used device number.")
        .SetDefault(true);
    AddComment(R"DOC(
      Read Operator

      Execute a given reader once and output data.
    )DOC");
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
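// The read op has no gradient, so empty gradient op makers are registered for
// both the static graph and the imperative mode.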
REGISTER_OPERATOR(
    read, ops::ReadOp, ops::ReadInferShape, ops::ReadOpMaker,
    paddle::framework::EmptyGradOpMaker<paddle::framework::OpDesc>,
    paddle::framework::EmptyGradOpMaker<paddle::imperative::OpBase>,
    ops::ReadInferVarType);