// create_custom_reader_op.cc
//   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/framework/executor.h"
#include "paddle/fluid/operators/reader/reader_op_registry.h"

namespace paddle {
namespace operators {
namespace reader {

class CustomReader : public framework::DecoratedReader {
 public:
F
fengjiayi 已提交
24 25
  CustomReader(const std::shared_ptr<ReaderBase>& reader,
               const framework::BlockDesc& sub_block,
F
fengjiayi 已提交
26 27 28
               const std::vector<std::string>& source_var_names,
               const std::vector<std::string>& sink_var_names)
      : DecoratedReader(reader),
F
fengjiayi 已提交
29 30
        program_(*sub_block.Program()),
        sub_block_id_(sub_block.ID()),
F
fengjiayi 已提交
31
        exe_(framework::Executor(platform::CPUPlace())),
F
fengjiayi 已提交
32
        source_var_names_(source_var_names),
F
fengjiayi 已提交
33
        sink_var_names_(sink_var_names) {}
F
fengjiayi 已提交
34

35
  void ReadNextImpl(std::vector<framework::LoDTensor>* out) override;
F
fengjiayi 已提交
36 37

 private:
F
fengjiayi 已提交
38 39
  const framework::ProgramDesc program_;
  int sub_block_id_;
F
fengjiayi 已提交
40
  framework::Executor exe_;
F
fengjiayi 已提交
41
  framework::Scope scope_;
F
fengjiayi 已提交
42 43 44 45 46 47 48 49 50 51 52 53 54 55

  std::vector<std::string> source_var_names_;
  std::vector<std::string> sink_var_names_;
};

// Operator that wraps an existing reader ("UnderlyingReader") in a
// CustomReader configured from the op's attributes, and stores the result in
// the "Out" ReaderHolder variable.
class CreateCustomReaderOp : public framework::OperatorBase {
 public:
  using framework::OperatorBase::OperatorBase;

 private:
  void RunImpl(const framework::Scope& scope,
               const platform::Place& dev_place) const override {
    auto* out = scope.FindVar(Output("Out"))
                    ->template GetMutable<framework::ReaderHolder>();
    auto* sub_block = Attr<framework::BlockDesc*>("sub_block");
    // Idempotent: if the output reader was already created, keep it.
    if (out->Get() != nullptr) {
      return;
    }
    const auto& underlying_reader = scope.FindVar(Input("UnderlyingReader"))
                                        ->Get<framework::ReaderHolder>();
    out->Reset(framework::MakeDecoratedReader<CustomReader>(
        underlying_reader, *sub_block,
        Attr<std::vector<std::string>>("source_var_names"),
        Attr<std::vector<std::string>>("sink_var_names")));
  }
};

class CreateCustomReaderOpMaker : public DecoratedReaderMakerBase {
F
fengjiayi 已提交
70 71
 protected:
  void Apply() override {
F
fengjiayi 已提交
72 73 74 75 76 77 78 79 80 81 82 83
    AddAttr<framework::BlockDesc*>(
        "sub_block", "The block to hold all preprocessing operators.");
    AddAttr<std::vector<std::string>>(
        "source_var_names",
        "Source variables are starting points of data preprocessing. They hold "
        "preprocessing's input tensors. Each source variable corresponds to "
        "one of underlying reader's output datas.");
    AddAttr<std::vector<std::string>>(
        "sink_var_names",
        "Sink variables are ending points of data preprocessing. They hold "
        "preprocessing's output tensors. Each sink variable corresponds to "
        "one of custom reader's output datas.");
F
fengjiayi 已提交
84 85 86
    AddComment(R"DOC(
      CreateCustomReader Operator

M
minqiyang 已提交
87 88 89 90
      A custom reader can be used for input data preprocessing.
      A custom reader holds its own sub-block, which will be executed in CPU
      in its 'ReadNext()' function. Users can configurate their own
      preprocessing pipelines by inserting operators into custom reader's
F
fengjiayi 已提交
91
      sub-block.
F
fengjiayi 已提交
92 93 94 95
    )DOC");
  }
};

96 97 98 99 100 101 102 103
class CustomReaderInferShape : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE(!ctx->IsRuntime(),
                   "'CustomReaderInferShape' should only be invoked during "
                   "compile time.");
    PADDLE_ENFORCE(ctx->HasOutput("Out"),
                   "The output decorated reader should not be null.");
F
fengjiayi 已提交
104 105
    const auto* sub_block =
        ctx->Attrs().Get<framework::BlockDesc*>("sub_block");
106 107 108 109 110
    const auto sink_var_names =
        ctx->Attrs().Get<std::vector<std::string>>("sink_var_names");
    std::vector<std::vector<int64_t>> res_dims;
    std::vector<int32_t> res_lod_levels;
    for (const std::string& var_name : sink_var_names) {
F
fengjiayi 已提交
111
      auto* sink_var = sub_block->FindVar(var_name);
112 113 114 115 116 117 118 119 120 121 122 123 124
      PADDLE_ENFORCE_NOT_NULL(sink_var);
      res_dims.emplace_back(sink_var->GetShape());
      res_lod_levels.push_back(sink_var->GetLoDLevel());
    }
    auto* out_reader =
        boost::get<framework::VarDesc*>(ctx->GetOutputVarPtrs("Out")[0]);
    out_reader->SetShapes(res_dims);
    out_reader->SetLoDLevels(res_lod_levels);
  }
};

// Variable-type inference: marks "Out" as a READER variable and sets its
// element data types from the sink variables of the preprocessing sub-block.
class CustomReaderInferVarType : public framework::VarTypeInference {
 public:
  void operator()(framework::InferVarTypeContext* ctx) const override {
    auto& out_var_name = ctx->Output("Out")[0];
    PADDLE_ENFORCE(ctx->HasVar(out_var_name));
    ctx->SetType(out_var_name, framework::proto::VarType::READER);

    auto sink_var_names =
        boost::get<std::vector<std::string>>(ctx->GetAttr("sink_var_names"));
    const auto* sub_block =
        boost::get<framework::BlockDesc*>(ctx->GetAttr("sub_block"));
    // One data type per sink variable, in declaration order.
    std::vector<framework::proto::VarType::Type> res_data_types;
    for (const std::string& var_name : sink_var_names) {
      framework::VarDesc* var = sub_block->FindVar(var_name);
      PADDLE_ENFORCE_NOT_NULL(var);
      res_data_types.emplace_back(var->GetDataType());
    }
    ctx->SetDataTypes(out_var_name, res_data_types);
  }
};

144
void CustomReader::ReadNextImpl(std::vector<framework::LoDTensor>* out) {
145 146 147 148 149 150 151
  out->clear();
  std::vector<framework::LoDTensor> underlying_outs;
  reader_->ReadNext(&underlying_outs);
  if (underlying_outs.empty()) {
    // There is not next data.
    return;
  }
F
fengjiayi 已提交
152 153 154 155 156
  PADDLE_ENFORCE(source_var_names_.size() == underlying_outs.size(),
                 "The size of source_var_names(%d) and the size of "
                 "underlying_outs(%d) are not consistent. Each feeding element "
                 "must have its own source variable.",
                 source_var_names_.size(), underlying_outs.size());
F
fengjiayi 已提交
157 158 159
  // The scope for CustomReader's sub-block should be independent and shouldn't
  // be any other computation scope's child. Otherwise, data preprocessing and
  // compution cannot be concurrent.
F
fengjiayi 已提交
160
  framework::Scope* exe_scope = &scope_.NewScope();
161
  // 1. Copy LoDTensors from underlying reader's output to source variables.
F
fengjiayi 已提交
162
  for (size_t i = 0; i < source_var_names_.size(); ++i) {
F
fengjiayi 已提交
163
    framework::Variable* var = exe_scope->Var(source_var_names_[i]);
164 165 166
    framework::LoDTensor* tensor = var->GetMutable<framework::LoDTensor>();
    tensor->ShareDataWith(underlying_outs[i]);
    tensor->set_lod(underlying_outs[i].lod());
F
fengjiayi 已提交
167
  }
168
  // 2. Run the sub-block.
Z
Zeng Jinle 已提交
169
  exe_.Run(program_, exe_scope, sub_block_id_, false, true, {}, true);
170 171 172
  // 3. Copy LoDTensors from sink variables to out.
  out->resize(sink_var_names_.size());
  for (size_t i = 0; i < sink_var_names_.size(); ++i) {
173 174 175 176 177
    auto* var = exe_scope->FindVar(sink_var_names_[i]);
    PADDLE_ENFORCE_NOT_NULL(var, platform::errors::NotFound(
                                     "The variable %s is not in current scope.",
                                     sink_var_names_[i]));
    const auto& tensor = var->Get<framework::LoDTensor>();
F
fengjiayi 已提交
178
    framework::TensorCopySync(tensor, platform::CPUPlace(), &(*out)[i]);
179
  }
F
fengjiayi 已提交
180
  scope_.DeleteScope(exe_scope);
F
fengjiayi 已提交
181 182 183 184 185
}

}  // namespace reader
}  // namespace operators
}  // namespace paddle
186 187

namespace ops = paddle::operators::reader;
H
hong 已提交
188 189 190 191 192 193
REGISTER_OPERATOR(
    create_custom_reader, ops::CreateCustomReaderOp,
    ops::CreateCustomReaderOpMaker, ops::CustomReaderInferShape,
    ops::CustomReaderInferVarType,
    paddle::framework::EmptyGradOpMaker<paddle::framework::OpDesc>,
    paddle::framework::EmptyGradOpMaker<paddle::imperative::OpBase>)