/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <cstdint>
#include <cstring>
#include <memory>
#include <typeindex>
#include <utility>
#include <vector>

#include "paddle/fluid/framework/data_layout.h"
#include "paddle/fluid/framework/ddim.h"
#include "paddle/fluid/framework/framework.pb.h"
#include "paddle/fluid/memory/memory.h"
#include "paddle/fluid/platform/device_context.h"
#include "paddle/fluid/platform/enforce.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/stream/stream.h"

#include "paddle/pten/core/dense_tensor.h"

namespace paddle {

namespace framework {

class LoDTensor;

/*
 NOTE(liym27): [ What is TensorInplaceVersion used for? ]

 TensorInplaceVersion is a version counter; every Tensor holds one. It is used
 to check whether an inplace operation would lead to an incorrect gradient
 calculation. The version is incremented whenever the data of the Variable is
 modified in place.

 - Question: In what scenarios will version counters be shared?
 - Answer: When two Variables/VarBases share the same C++ Tensor (whose
 Allocation may change), both of them share the same version counter. For
 example:
  1. `z = paddle.assign(input=x, output=y)`: `z` shares the version counter
    of `y` because z and y are the same VarBase;
  2. `y = x.detach()`: `y` shares the version counter of `x`.

 - Question: In what scenarios will version counters NOT be shared?
 - Answer: When a `Variable`'s data is replaced by calling
 `Tensor::ShareDataWith(...)` or `Tensor::ShareBufferWith(...)`, because those
 calls share the same Allocation but not the same framework::Tensor.

 - Question: Why is inplace_version_counter_ placed in framework::Tensor
 instead of in Allocation or Variable?
 - Answer:
  1. If it lived in Allocation, a Tensor calling ResetHolder() to reset its
  Allocation would also change inplace_version_counter_, giving confusing
  information about the inplace version.
  2. If it lived in Variable, different VariableWrappers would have to share
  the same Variable to share the counter; however, a VariableWrapper holds a
  Variable object, not a pointer to one.
*/
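
/*
 An illustrative sketch (not part of the original note) of the sharing rules
 above, using only the two Share* methods declared below; how the counter
 value is read back is left out, since no accessor appears in this header:

   framework::Tensor x, y;
   y.ShareDataWith(x);                   // x and y share one Allocation but
                                         // keep separate version counters
   y.ShareInplaceVersionCounterWith(x);  // now x and y share one counter
*/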

class Tensor : public pten::DenseTensor {
 public:
  using DenseTensor = pten::DenseTensor;
  using DenseTensor::DenseTensor;

  /*! Make the two tensors share the same underlying memory block. */
  Tensor& ShareDataWith(const Tensor& src);

  /*! Make the two tensors share the same inplace version counter. */
  Tensor& ShareInplaceVersionCounterWith(const Tensor& src);

  Tensor Slice(int64_t begin_idx, int64_t end_idx) const;

  std::vector<Tensor> Split(int64_t split_size, int64_t axis) const;

  std::vector<Tensor> Chunk(int64_t chunks, int64_t axis) const;
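
  /*
   Sketch of the intended use of Split and Chunk; the piece shapes and the
   allocation call below are assumptions inferred from the signatures and
   common fluid usage, not something this header guarantees:

     Tensor t;
     t.Resize(make_ddim({6, 4}));
     t.mutable_data<float>(platform::CPUPlace());
     auto by_size  = t.Split(3, 0);  // pieces of size 3 along axis 0 (assumed)
     auto by_count = t.Chunk(2, 0);  // 2 equal pieces along axis 0 (assumed)
  */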

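  // Note: Resize only updates the dims metadata; it does not allocate or free
  // the underlying memory.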
  Tensor& Resize(const DDim& dims) {
    meta_.dims = dims;
    return *this;
  }
};

}  // namespace framework
}  // namespace paddle

#include "paddle/fluid/framework/tensor_impl.h"