/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <cstdint>
#include <cstring>
#include <memory>
#include "paddle/framework/ddim.h"
#include "paddle/framework/enforce.h"
#include "paddle/memory/memory.h"
#include "paddle/platform/place.h"

namespace paddle {
namespace framework {

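// Tensor holds a shared, reference-counted memory block (Placeholder) together
// with its dimensions (dims_) and a byte offset into that block, so several
// tensors (e.g. slices) can alias the same allocation.
//
// A minimal usage sketch, assuming make_ddim from ddim.h and a CPU place:
//
//   Tensor t;
//   float* buf = t.mutable_data<float>(make_ddim({2, 3}),
//                                      platform::CPUPlace());
//   buf[0] = 1.f;                        // write through the raw pointer
//   const float* ro = t.data<float>();   // read-only view of the same block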
class Tensor {
 public:
  Tensor() : numel_(0), offset_(0) {}

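  // Returns a read-only pointer to the first element, i.e. the start of the
  // underlying block shifted by offset_ bytes. Requires that memory has
  // already been allocated (enforced by CheckDims).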
  template <typename T>
  const T* data() const {
    CheckDims<T>();
    return reinterpret_cast<const T*>(
        reinterpret_cast<uintptr_t>(holder_->ptr()) + offset_);
  }

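  // Resizes the tensor to `dims`, then allocates (or reuses) memory on
  // `place`; shorthand for set_dims(dims) followed by mutable_data<T>(place).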
  template <typename T>
  T* mutable_data(DDim dims, platform::Place place) {
    set_dims(dims);
    return mutable_data<T>(place);
  }

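  // Returns a writable pointer, (re)allocating the underlying block when there
  // is no holder yet, the place changed, or the current block is smaller than
  // numel_ * sizeof(T) + offset_.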
  template <typename T>
  T* mutable_data(platform::Place place) {
    PADDLE_ENFORCE(numel_ > 0,
                   "Tensor::numel_ must be larger than zero to call "
                   "Tensor::mutable_data. Call Tensor::set_dim first.");
    if (holder_ == nullptr ||
        !(holder_->place() ==
          place) /* some versions of boost::variant don't have operator!= */
        || holder_->size() < numel_ * sizeof(T) + offset_) {
      switch (place.which()) {
        case 0:
          holder_.reset(new PlaceholderImpl<T, platform::GPUPlace>(
              boost::get<platform::GPUPlace>(place), numel_ * sizeof(T)));
          break;

        case 1:
          holder_.reset(new PlaceholderImpl<T, platform::CPUPlace>(
              boost::get<platform::CPUPlace>(place), numel_ * sizeof(T)));
          break;
      }

      offset_ = 0;
    }
    return reinterpret_cast<T*>(reinterpret_cast<uintptr_t>(holder_->ptr()) +
                                offset_);
  }

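  // Shares the memory block of `src` without copying: this tensor points to
  // the same holder and takes over src's dims and offset.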
  template <typename T>
  void ShareDataFrom(const Tensor& src) {
    src.CheckDims<T>();
    holder_ = src.holder_;
    set_dims(src.dims());
    offset_ = src.offset_;
  }

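  // Deep-copies `src` into this tensor on `dst_place`; only CPU-to-CPU copies
  // are supported for now.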
  template <typename T>
  void CopyFrom(const Tensor& src, platform::Place dst_place) {
    PADDLE_ENFORCE(platform::is_cpu_place(src.holder_->place()) &&
                       platform::is_cpu_place(dst_place),
                   "Tensor::CopyFrom only support CPU now.");
    src.CheckDims<T>();
    size_t size = src.numel_ * sizeof(T);
    set_dims(src.dims());
    const void* src_ptr = static_cast<const void*>(src.data<T>());
    void* dst_ptr = static_cast<void*>(mutable_data<T>(dst_place));
    memcpy(dst_ptr, src_ptr, size);
  }

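  // Returns a view of rows [begin_idx, end_idx) along the first dimension.
  // No data is copied: the result shares holder_ and only adjusts its dims_
  // and offset_.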
  template <typename T>
  Tensor Slice(const int& begin_idx, const int& end_idx) const {
    CheckDims<T>();
    PADDLE_ENFORCE(begin_idx >= 0 && end_idx <= dims_[0],
                   "Slice index is less than zero or out of bound.");
    PADDLE_ENFORCE(begin_idx < end_idx,
                   "Begin index must be less than end index.");
    PADDLE_ENFORCE(dims_[0] != 1, "Can not slice a tensor with dims_[0] = 1.");
    std::vector<int> d = vectorize(dims_);
    int base = 1;
    for (size_t i = 1; i < d.size(); ++i) {
      base *= d[i];
    }
    Tensor dst;
    dst.holder_ = holder_;
    DDim dst_dims = dims_;
    dst_dims[0] = end_idx - begin_idx;
    dst.set_dims(dst_dims);
    dst.offset_ = offset_ + begin_idx * base * sizeof(T);
    return dst;
  }

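  // Sets dims_ and refreshes the cached element count numel_. This does not
  // allocate memory; call mutable_data afterwards.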
  void set_dims(const DDim& dims) {
    if (dims == dims_) {
      return;
    }
    dims_ = dims;
    numel_ = product(dims_);
  }

  DDim dims() const { return dims_; }

 private:
  // Placeholder hides type T, so it doesn't appear as a template
  // parameter of Tensor.
  struct Placeholder {
    virtual ~Placeholder() {}
    virtual void* ptr() const = 0;
    virtual platform::Place place() const = 0;
    virtual size_t size() const = 0;
  };

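  // PlaceholderImpl allocates `size` bytes on the given place via
  // memory::Alloc and releases them with memory::Free through a custom
  // deleter bound to that place.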
  template <typename T, typename PlaceType>
  struct PlaceholderImpl : public Placeholder {
   private:
    template <typename PType>
    class Deleter {
     public:
      Deleter(PType place) : place_(place) {}
      void operator()(T* ptr) { memory::Free(place_, static_cast<void*>(ptr)); }

     private:
      PType place_;
    };

   public:
    PlaceholderImpl(PlaceType place, size_t size)
        : ptr_(static_cast<T*>(memory::Alloc(place, size)),
               Deleter<PlaceType>(place)),
          place_(place),
          size_(size) {}

    virtual void* ptr() const { return static_cast<void*>(ptr_.get()); }
    virtual size_t size() const { return size_; }
    virtual platform::Place place() const { return place_; }

    std::unique_ptr<T, Deleter<PlaceType>> ptr_;
    platform::Place place_;  // record the place of ptr_.
    size_t size_;            // size of the memory block.
  };

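  // Checks that memory has been allocated and that it can hold numel_
  // elements of type T starting at offset_.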
  template <typename T>
  inline void CheckDims() const {
    PADDLE_ENFORCE(holder_ != nullptr,
                   "Tenosr holds no memory. Call Tensor::mutable_data first.");
    PADDLE_ENFORCE(holder_->size() >= numel_ * sizeof(T) + offset_,
                   "Tensor's dims_ is out of bound. Call Tensor::mutable_data "
                   "first to re-allocate memory.");
  }

  std::shared_ptr<Placeholder> holder_;  // holds the memory block if allocated.
  DDim dims_;
  size_t numel_;   // cache of `product(dims_)`
  size_t offset_;  // marks the beginning of the tensor data area.
};

}  // namespace framework
}  // namespace paddle