//   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include "paddle/framework/ddim.h"
#include "paddle/framework/lod_tensor.h"

namespace paddle {
namespace framework {

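// ReaderBase is the abstract interface shared by all readers. Each call to
// ReadNext() yields one data instance as a vector of LoDTensors (one tensor
// per output slot), and shape()/shapes() report the DDim of each slot.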
class ReaderBase {
 public:
  virtual std::vector<LoDTensor> ReadNext() = 0;
  virtual bool HasNext() const = 0;

  virtual DDim shape(size_t idx) const = 0;
  virtual std::vector<DDim> shapes() const = 0;

  virtual ~ReaderBase() {}
};

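// FileReader is the base class for readers that produce data themselves
// rather than wrapping another reader. The shape of every output slot must
// be supplied at construction time.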
class FileReader : public ReaderBase {
 public:
  explicit FileReader(const std::vector<DDim>& shapes) : shapes_(shapes) {
    PADDLE_ENFORCE(!shapes_.empty());
  }

  DDim shape(size_t idx) const override;
  std::vector<DDim> shapes() const override { return shapes_; }

 protected:
  std::vector<DDim> shapes_;
};

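// ReaderDecorator is the base class for readers that wrap an underlying
// reader and transform its output. HasNext(), shape() and shapes() are
// forwarded to the wrapped reader by default.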
class ReaderDecorator : public ReaderBase {
 public:
  explicit ReaderDecorator(ReaderBase* reader) : reader_(reader) {
    PADDLE_ENFORCE_NOT_NULL(reader_);
  }

  bool HasNext() const override { return reader_->HasNext(); }

  DDim shape(size_t idx) const override { return reader_->shape(idx); }
  std::vector<DDim> shapes() const override { return reader_->shapes(); }

 protected:
  ReaderBase* reader_;
};

// file readers

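// RandomReader fills each output tensor with values drawn uniformly from
// [min, max). It never exhausts, so HasNext() always returns true.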
template <typename T>
class RandomReader : public FileReader {
 public:
  RandomReader(const std::vector<DDim>& shapes, float min, float max)
      : FileReader(shapes), min_(min), max_(max) {
    PADDLE_ENFORCE_LE(min, max,
                      "'min' should be less than or equal to 'max'. (%f vs %f)",
                      min, max);
    unsigned int seed = std::random_device()();
    engine_.seed(seed);
    dist_ = std::uniform_real_distribution<float>(min_, max_);
  }

  std::vector<LoDTensor> ReadNext() override {
    std::vector<LoDTensor> res;
    res.reserve(shapes_.size());
    for (const DDim& shape : shapes_) {
      PADDLE_ENFORCE_GE(
          shape.size(), 2,
          "The rank of input data should be at least 2. (Now it's %d)",
          shape.size());
      LoDTensor out;
      out.Resize(shape);
      T* data = out.mutable_data<T>(platform::CPUPlace());
      int64_t numel = product(shape);
      for (int64_t i = 0; i < numel; ++i) {
        data[i] = dist_(engine_);
      }
      res.push_back(out);
    }
    return res;
  }

  bool HasNext() const override { return true; }

 private:
  float min_;
  float max_;
  std::minstd_rand engine_;
  std::uniform_real_distribution<float> dist_;
};

// decorators

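// ShuffleReader buffers up to 'buffer_size' instances from the underlying
// reader and yields them in shuffled order. ReadNext() is defined out of
// line.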
class ShuffleReader : public ReaderDecorator {
 public:
  ShuffleReader(ReaderBase* reader, int buffer_size)
      : ReaderDecorator(reader), buffer_size_(buffer_size), iteration_pos_(0) {
    buffer_.reserve(buffer_size);
  }

  std::vector<LoDTensor> ReadNext() override;

 private:
  int buffer_size_;
  std::vector<std::vector<LoDTensor>> buffer_;
  size_t iteration_pos_;
};

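// BatchReader gathers 'batch_size' instances from the underlying reader and
// merges them into a single batch per ReadNext() call. ReadNext() is defined
// out of line.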
class BatchReader : public ReaderDecorator {
 public:
  BatchReader(ReaderBase* reader, int batch_size)
      : ReaderDecorator(reader), batch_size_(batch_size) {
    buffer_.reserve(batch_size_);
  }

  std::vector<LoDTensor> ReadNext() override;

 private:
  int batch_size_;
  std::vector<std::vector<LoDTensor>> buffer_;
};

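// ReaderHolder owns a ReaderBase through a std::unique_ptr and forwards the
// reader interface to it.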
class ReaderHolder {
 public:
  void Reset(ReaderBase* reader) { reader_.reset(reader); }

  ReaderBase* Get() const { return reader_.get(); }

  std::vector<LoDTensor> ReadNext() { return reader_->ReadNext(); }
  bool HasNext() const { return reader_->HasNext(); }

  DDim shape(size_t idx) const { return reader_->shape(idx); }
  std::vector<DDim> shapes() const { return reader_->shapes(); }

 private:
  std::unique_ptr<ReaderBase> reader_;
};
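
// A minimal usage sketch (illustrative only; the shapes, buffer size, and
// batch size below are placeholders): a data-producing reader is wrapped by
// decorators and driven through a ReaderHolder. Note that the decorators
// store raw pointers, so ownership of the inner readers is not transferred.
//
//   std::vector<DDim> shapes = {make_ddim({4, 784}), make_ddim({4, 1})};
//   ReaderHolder holder;
//   holder.Reset(new BatchReader(
//       new ShuffleReader(new RandomReader<float>(shapes, 0.0f, 1.0f),
//                         /*buffer_size=*/100),
//       /*batch_size=*/32));
//   std::vector<LoDTensor> batch = holder.ReadNext();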

}  // namespace framework
}  // namespace paddle