/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <cstdint>
#include <cstring>
#include <memory>
#include <typeindex>

#include "paddle/framework/ddim.h"
#include "paddle/framework/enforce.h"
#include "paddle/memory/memory.h"
#include "paddle/platform/place.h"

namespace paddle {
namespace pybind {
namespace details {  // forward declare
template <bool less, size_t i, typename... args>
struct CastToPyBufferImpl;
}  // namespace details
}  // namespace pybind

namespace framework {

class Tensor {
 public:
F
fengjiayi 已提交
37
  Tensor() : numel_(0), offset_(0) {}
38

Y
Yi Wang 已提交
39 40
  template <typename T>
  const T* data() const {
F
fengjiayi 已提交
41
    CheckDims<T>();
42
    return reinterpret_cast<const T*>(
F
fengjiayi 已提交
43
        reinterpret_cast<uintptr_t>(holder_->ptr()) + offset_);
Y
Yi Wang 已提交
44 45
  }

F
fengjiayi 已提交
46
  template <typename T>
F
fengjiayi 已提交
47
  T* mutable_data(DDim dims, platform::Place place) {
F
fengjiayi 已提交
48
    set_dims(dims);
49 50 51
    return mutable_data<T>(place);
  }

F
fengjiayi 已提交
52
  template <typename T>
F
fengjiayi 已提交
53
  T* mutable_data(platform::Place place) {
F
fengjiayi 已提交
54 55
    PADDLE_ENFORCE(numel_ > 0,
                   "Tensor::numel_ must be larger than zero to call "
F
fengjiayi 已提交
56
                   "Tensor::mutable_data. Call Tensor::set_dim first.");
F
fengjiayi 已提交
57
    if (holder_ == nullptr ||
F
fengjiayi 已提交
58
        !(holder_->place() ==
F
fengjiayi 已提交
59
          place) /* some versions of boost::variant don't have operator!= */
F
fengjiayi 已提交
60
        || holder_->size() < numel_ * sizeof(T) + offset_) {
61
#ifdef __CUDACC__
F
fengjiayi 已提交
62 63 64 65 66 67 68 69 70 71 72
      switch (place.which()) {
        case 0:
          holder_.reset(new PlaceholderImpl<T, platform::GPUPlace>(
              boost::get<platform::GPUPlace>(place), numel_ * sizeof(T)));
          break;

        case 1:
          holder_.reset(new PlaceholderImpl<T, platform::CPUPlace>(
              boost::get<platform::CPUPlace>(place), numel_ * sizeof(T)));
          break;
      }
73 74 75 76
#else
      holder_.reset(new PlaceholderImpl<T, platform::CPUPlace>(
          boost::get<platform::CPUPlace>(place), numel_ * sizeof(T)));
#endif
F
fengjiayi 已提交
77

78
      offset_ = 0;
Y
Yi Wang 已提交
79
    }
F
fengjiayi 已提交
80
    return reinterpret_cast<T*>(reinterpret_cast<uintptr_t>(holder_->ptr()) +
81
                                offset_);
Y
Yi Wang 已提交
82 83
  }

F
fengjiayi 已提交
84
  template <typename T>
85
  void ShareDataFrom(const Tensor& src) {
F
fengjiayi 已提交
86
    src.CheckDims<T>();
87
    holder_ = src.holder_;
F
fengjiayi 已提交
88
    set_dims(src.dims());
89
    offset_ = src.offset_;
Y
Yi Wang 已提交
90 91
  }

F
fengjiayi 已提交
92
  template <typename T>
F
fengjiayi 已提交
93
  void CopyFrom(const Tensor& src, platform::Place dst_place) {
F
fengjiayi 已提交
94 95 96
    PADDLE_ENFORCE(platform::is_cpu_place(src.holder_->place()) &&
                       platform::is_cpu_place(dst_place),
                   "Tensor::CopyFrom only support CPU now.");
F
fengjiayi 已提交
97
    src.CheckDims<T>();
F
fengjiayi 已提交
98 99
    size_t size = src.numel_ * sizeof(T);
    set_dims(src.dims());
F
fengjiayi 已提交
100
    const void* src_ptr = static_cast<const void*>(src.data<T>());
F
fengjiayi 已提交
101 102
    void* dst_ptr = static_cast<void*>(mutable_data<T>(dst_place));
    memcpy(dst_ptr, src_ptr, size);
103 104
  }

F
fengjiayi 已提交
105
  template <typename T>
F
fengjiayi 已提交
106
  Tensor Slice(const int& begin_idx, const int& end_idx) const {
F
fengjiayi 已提交
107
    CheckDims<T>();
108 109 110 111 112 113 114 115 116 117 118 119
    PADDLE_ENFORCE(begin_idx >= 0 && end_idx <= dims_[0],
                   "Slice index is less than zero or out of bound.");
    PADDLE_ENFORCE(begin_idx < end_idx,
                   "Begin index must be less than end index.");
    PADDLE_ENFORCE(dims_[0] != 1, "Can not slice a tensor with dims_[0] = 1.");
    std::vector<int> d = vectorize(dims_);
    int base = 1;
    for (size_t i = 1; i < d.size(); ++i) {
      base *= d[i];
    }
    Tensor dst;
    dst.holder_ = holder_;
F
fengjiayi 已提交
120 121 122
    DDim dst_dims = dims_;
    dst_dims[0] = end_idx - begin_idx;
    dst.set_dims(dst_dims);
F
fengjiayi 已提交
123
    dst.offset_ = offset_ + begin_idx * base * sizeof(T);
124 125 126
    return dst;
  }

F
fengjiayi 已提交
127 128 129 130 131 132 133 134
  void set_dims(const DDim& dims) {
    if (dims == dims_) {
      return;
    }
    dims_ = dims;
    numel_ = product(dims_);
  }

135 136
  DDim dims() const { return dims_; }

Y
Yi Wang 已提交
137 138 139 140 141
 private:
  // Placeholder hides type T, so it doesn't appear as a template
  // parameter of Variable.
  struct Placeholder {
    virtual ~Placeholder() {}
F
fengjiayi 已提交
142
    virtual void* ptr() const = 0;
F
fengjiayi 已提交
143
    virtual platform::Place place() const = 0;
F
fengjiayi 已提交
144
    virtual size_t size() const = 0;
Y
Yu Yang 已提交
145
    virtual std::type_index type() const = 0;
Y
Yi Wang 已提交
146 147
  };

F
fengjiayi 已提交
148
  template <typename T, typename PlaceType>
Y
Yi Wang 已提交
149
  struct PlaceholderImpl : public Placeholder {
150
   private:
F
fengjiayi 已提交
151
    template <typename PType>
152 153
    class Deleter {
     public:
F
fengjiayi 已提交
154 155
      Deleter(PType place) : place_(place) {}
      void operator()(T* ptr) { memory::Free(place_, static_cast<void*>(ptr)); }
156 157

     private:
F
fengjiayi 已提交
158
      PType place_;
159 160 161
    };

   public:
F
fengjiayi 已提交
162 163 164
    PlaceholderImpl(PlaceType place, size_t size)
        : ptr_(static_cast<T*>(memory::Alloc(place, size)),
               Deleter<PlaceType>(place)),
165
          place_(place),
Y
Yi Wang 已提交
166 167
          size_(size) {}

F
fengjiayi 已提交
168 169
    virtual void* ptr() const { return static_cast<void*>(ptr_.get()); }
    virtual size_t size() const { return size_; }
Y
Yu Yang 已提交
170 171
    virtual paddle::platform::Place place() const { return place_; }
    virtual std::type_index type() const { return std::type_index(typeid(T)); }
Y
Yi Wang 已提交
172

F
fengjiayi 已提交
173 174 175
    std::unique_ptr<T, Deleter<PlaceType>> ptr_;
    platform::Place place_;  // record the place of ptr_.
    size_t size_;            // size of the memory block.
Y
Yi Wang 已提交
176 177
  };

F
fengjiayi 已提交
178
  template <typename T>
F
fengjiayi 已提交
179
  inline void CheckDims() const {
F
fengjiayi 已提交
180 181
    PADDLE_ENFORCE(holder_ != nullptr,
                   "Tenosr holds no memory. Call Tensor::mutable_data first.");
F
fengjiayi 已提交
182
    PADDLE_ENFORCE(holder_->size() >= numel_ * sizeof(T) + offset_,
F
fengjiayi 已提交
183 184 185 186
                   "Tensor's dims_ is out of bound. Call Tensor::mutable_data "
                   "first to re-allocate memory.");
  }

187
  std::shared_ptr<Placeholder> holder_;  // holds the memory block if allocated.
188
  DDim dims_;
F
fengjiayi 已提交
189
  size_t numel_;   // cache of `product(dims_)`
190
  size_t offset_;  // marks the begin of tensor data area.
191 192 193
  template <bool less, size_t i, typename... args>
  friend struct paddle::pybind::details::CastToPyBufferImpl;
};  // namespace framework

}  // namespace framework
}  // namespace paddle