/* Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <memory>

#include "paddle/phi/api/include/dll_decl.h"
#include "paddle/phi/common/data_type.h"
#include "paddle/phi/common/place.h"
#include "paddle/phi/core/allocator.h"
#include "paddle/phi/core/distributed/comm_context.h"
#include "paddle/phi/core/generator.h"
#include "paddle/phi/core/utils/type_registry.h"

namespace phi {
class TensorBase;

/**
 * DeviceContext provides device-related interfaces.
 *
 * All kernels must access the interfaces provided by the backend through
 * DeviceContext.
 */
class PADDLE_API DeviceContext {
 public:
  /**
   * @brief Default constructor.
   */
  DeviceContext();

  /**
   * @brief Copy construct.
   */
  DeviceContext(const DeviceContext&);

  /**
   * @brief Move construct.
   */
  DeviceContext(DeviceContext&&);

  /**
   * @brief Move assignment operator.
   */
  DeviceContext& operator=(DeviceContext&&);

  /**
   * @brief Virtual destructor.
   */
  virtual ~DeviceContext();

  /**
   * @brief Set the device-related Allocator object.
   *
   * @param allocator
   */
  void SetAllocator(const Allocator*);

  /**
   * @brief Set the host Allocator object.
   *
   * @param allocator
   */
  void SetHostAllocator(const Allocator*);

  /**
   * @brief Set the zero-size Allocator object.
   *
   * @param allocator
   */
  void SetZeroAllocator(const Allocator*);

  /**
   * @brief Set the zero-size host Allocator object.
   *
   * @param allocator
   */
  void SetHostZeroAllocator(const Allocator*);

  /**
   * @brief Set the pinned Allocator object.
   *
   * @param allocator
   */
  void SetPinnedAllocator(const Allocator*);

  /**
   * @brief Get the const device-related Allocator object.
   *
   * @return Allocator
   */
  const Allocator& GetAllocator() const;

  /**
   * @brief Get the const host Allocator object.
   *
   * @return Allocator
   */
  const Allocator& GetHostAllocator() const;

  const Allocator& GetZeroAllocator() const;

  const Allocator& GetHostZeroAllocator() const;

  const Allocator& GetPinnedAllocator() const;
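
  // Wiring sketch (illustrative only; the allocator objects named below are
  // hypothetical placeholders owned by the backend): the framework wires
  // allocators into a context once during setup, and kernels read them back
  // through the matching getters, e.g.
  //
  //   dev_ctx.SetAllocator(&device_allocator);
  //   dev_ctx.SetHostAllocator(&host_allocator);
  //   dev_ctx.SetZeroAllocator(&zero_size_allocator);
  //   dev_ctx.SetHostZeroAllocator(&host_zero_size_allocator);
  //   dev_ctx.SetPinnedAllocator(&pinned_allocator);
  //   ...
  //   const Allocator& alloc = dev_ctx.GetAllocator();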

#ifdef PADDLE_WITH_CUDA
  /**
   * @brief Set the CUDA graph Allocator object.
   *
   * @param allocator
   */
  void SetCUDAGraphAllocator(const Allocator*);

  /**
   * @brief Get the const CUDA graph Allocator object.
   *
   * @return Allocator
   */
  const Allocator& GetCUDAGraphAllocator() const;

  /**
   * @brief Test whether the CUDA graph allocator is valid
   *
   * This method should be called before GetCUDAGraphAllocator(); other code
   * may call GetCUDAGraphAllocator() only when this method returns true.
   *
   * @return true if cuda_graph_allocator_ is valid, false otherwise
   */
  bool IsCUDAGraphAllocatorValid() const;
#endif
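
  // Guarded-access sketch (illustrative, under PADDLE_WITH_CUDA): the CUDA
  // graph allocator is only present while CUDA graph capture is configured,
  // so callers are expected to check validity before fetching it, e.g.
  //
  //   if (dev_ctx.IsCUDAGraphAllocatorValid()) {
  //     const Allocator& graph_alloc = dev_ctx.GetCUDAGraphAllocator();
  //     // ... allocate memory that must stay valid across graph replays ...
  //   }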

  /**
   * @brief Allocate device memory for tensor.
   */
  virtual void* Alloc(TensorBase*,
                      DataType dtype,
                      size_t requested_size = 0,
                      bool pinned = false,
                      bool fake_alloc = false) const;

  template <typename T>
  T* Alloc(TensorBase* tensor,
           size_t requested_size = 0,
           bool pinned = false) const;

  /**
   * @brief Allocate host memory for tensor.
   */
  void* HostAlloc(TensorBase* tensor,
                  DataType dtype,
                  size_t requested_size = 0,
                  bool fake_alloc = false) const;

  template <typename T>
  T* HostAlloc(TensorBase* tensor, size_t requested_size = 0) const;
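
  // Allocation sketch (illustrative; `out_tensor` and `cpu_tensor` are
  // hypothetical tensors): the typed overloads infer the DataType from T,
  // while the untyped overloads take an explicit DataType for cases where the
  // element type is only known at runtime, e.g.
  //
  //   float* dst = dev_ctx.Alloc<float>(out_tensor);          // device memory
  //   float* host_buf = dev_ctx.HostAlloc<float>(cpu_tensor); // host memory
  //   void* raw = dev_ctx.Alloc(out_tensor, DataType::FLOAT32);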

  virtual const Place& GetPlace() const = 0;

  // TODO(wilber): The fluid framework uses Wait() in many places; figure out
  // how to remove this API interface.
  virtual void Wait() const {}

  /**
   * @brief Set the generator for special ops.
   *
   * @param Generator
   */
  void SetGenerator(Generator*);
  /**
   * @brief Get the generator object.
   *
   * @return Generator
   */
  Generator* GetGenerator() const;

  /**
   * @brief Set the host generator for special ops.
   *
   * @param Generator
   */
  void SetHostGenerator(Generator*);
  /**
   * @brief Get the host generator object.
   *
   * @return Generator
   */
  Generator* GetHostGenerator() const;
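
  // Sketch (illustrative; the generator objects are hypothetical): random
  // kernels fetch the generator that the framework attached earlier instead
  // of owning their own random state, e.g.
  //
  //   dev_ctx.SetGenerator(&device_generator);    // framework-side setup
  //   dev_ctx.SetHostGenerator(&host_generator);
  //   ...
  //   Generator* gen = dev_ctx.GetGenerator();    // inside a random kernel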

  /**
   * @brief Return the type information of the derived class to support
   *        safe downcasting in a non-RTTI environment.
   *
   * @return The type information of the derived class.
   */
  TypeInfo<DeviceContext> type_info() const { return type_info_; }
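
  // Downcast sketch (illustrative and assumption-laden: SomeBackendContext and
  // backend_type_info are placeholders for a real derived context and its
  // registered type information, and TypeInfo is assumed to be comparable for
  // equality via the TypeInfoTraits friendship declared below):
  //
  //   if (dev_ctx.type_info() == backend_type_info) {
  //     auto& backend_ctx = static_cast<const SomeBackendContext&>(dev_ctx);
  //   }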

  /**
   * @brief Set the comm context pointer.
   *
   * @param CommContext
   */
  void SetCommContext(distributed::CommContext* comm_context);

  /**
   * @brief Get the comm context pointer.
   *
   * @return comm context pointer
   */
  distributed::CommContext* GetCommContext() const;
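
  // Sketch (illustrative; `comm_ctx` is a hypothetical communicator created by
  // the distributed runtime): the framework attaches the communication context
  // and distributed kernels read it back, e.g.
  //
  //   dev_ctx.SetCommContext(comm_ctx);        // framework side
  //   auto* comm = dev_ctx.GetCommContext();   // kernel side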

 private:
  struct Impl;
  std::unique_ptr<Impl> impl_;

  template <typename T, typename U>
  friend class TypeInfoTraits;
  TypeInfo<DeviceContext> type_info_{TypeInfo<DeviceContext>::kUnknownType};
};
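
// A minimal end-to-end sketch of how a kernel typically consumes the class
// above (ScaleByTwo, the float element type, and the launch step are
// illustrative assumptions; only methods declared in this header are used):
//
//   void ScaleByTwo(const phi::DeviceContext& dev_ctx,
//                   const phi::TensorBase& x,
//                   phi::TensorBase* out) {
//     float* out_data = dev_ctx.Alloc<float>(out);  // device-side output
//     // ... read x and write out_data on dev_ctx.GetPlace() ...
//     dev_ctx.Wait();  // block until device work finishes, if required
//   }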

}  // namespace phi