/* Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
// disable numpy compile error
#include <Python.h>

#include <sstream>
#include <string>
#include <vector>

#include "paddle/fluid/eager/accumulation/accumulation_node.h"
#include "paddle/fluid/eager/api/all.h"
#include "paddle/fluid/eager/api/utils/tensor_utils.h"
#include "paddle/fluid/eager/autograd_meta.h"
#include "paddle/fluid/eager/utils.h"
#include "paddle/fluid/memory/allocation/allocator.h"
#include "paddle/fluid/memory/memcpy.h"
#include "paddle/fluid/platform/enforce.h"
#include "paddle/fluid/pybind/eager.h"
#include "paddle/fluid/pybind/eager_utils.h"
#include "paddle/fluid/pybind/exception.h"
#include "paddle/phi/common/data_type.h"
#include "paddle/phi/core/compat/convert_utils.h"
#include "paddle/phi/core/dense_tensor.h"
#pragma GCC diagnostic ignored "-Wwrite-strings"

namespace paddle {
namespace pybind {

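// The Python-side eager Tensor type object; defined elsewhere in the pybind
// module.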
extern PyTypeObject* p_tensor_type;

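// Getter for the Python-side Tensor.name property.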
PyObject* tensor_properties_get_name(TensorObject* self, void* closure) {
  EAGER_TRY
  return ToPyObject(self->tensor.name());
  EAGER_CATCH_AND_THROW_RETURN_NULL
}

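// Getter for Tensor.type: maps the underlying tensor kind to the legacy
// VarType enum (LOD_TENSOR for dense tensors, SELECTED_ROWS for selected
// rows); any other kind returns None.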
PyObject* tensor_properties_get_type(TensorObject* self, void* closure) {
  EAGER_TRY
  if (!self->tensor.defined()) {
    // keep the same behavior as the old dygraph mode
    return ToPyObject(paddle::framework::proto::VarType::LOD_TENSOR);
  }
  if (self->tensor.is_dense_tensor()) {
    return ToPyObject(paddle::framework::proto::VarType::LOD_TENSOR);
  } else if (self->tensor.is_selected_rows()) {
    return ToPyObject(paddle::framework::proto::VarType::SELECTED_ROWS);
  } else {
    Py_INCREF(Py_None);
    return Py_None;
  }
  EAGER_CATCH_AND_THROW_RETURN_NULL
}

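// Getter for Tensor.is_leaf: whether this tensor is a leaf node in the
// autograd graph.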
PyObject* tensor_properties_is_leaf(TensorObject* self, void* closure) {
  EAGER_TRY
  return ToPyObject(egr::egr_utils_api::IsLeafTensor(self->tensor));
  EAGER_CATCH_AND_THROW_RETURN_NULL
}

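// Setter for Tensor.name.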
int tensor_properties_set_name(TensorObject* self, PyObject* value,
                               void* closure) {
  EAGER_TRY
  self->tensor.set_name(CastPyArg2AttrString(value, 0));
  return 0;
  EAGER_CATCH_AND_THROW_RETURN_NEG
}

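// Getter for Tensor.stop_gradient.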
PyObject* tensor_properties_get_stop_gradient(TensorObject* self,
                                              void* closure) {
  EAGER_TRY
  auto meta = egr::EagerUtils::autograd_meta(&self->tensor);
  return ToPyObject(meta->StopGradient());
  EAGER_CATCH_AND_THROW_RETURN_NULL
}

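// Getter for Tensor.grad: returns the accumulated gradient, or None if no
// gradient has been initialized yet.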
PyObject* tensor_properties_get_grad(TensorObject* self, void* closure) {
  EAGER_TRY
  VLOG(6) << "Get grad for tensor: " << self->tensor.name();
  auto meta = egr::EagerUtils::nullable_autograd_meta(self->tensor);
  if (meta && meta->Grad().initialized()) {
    return ToPyObject(meta->Grad());
  } else {
    Py_INCREF(Py_None);
    return Py_None;
  }
  EAGER_CATCH_AND_THROW_RETURN_NULL
}

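// Setter for Tensor.grad: only supported on leaf tensors; copies the given
// tensor into the existing grad buffer.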
int tensor_properties_set_grad(TensorObject* self, PyObject* value,
                               void* closure) {
  EAGER_TRY
  auto src = CastPyArg2Tensor(value, 0);
  PADDLE_ENFORCE(
      egr::egr_utils_api::IsLeafTensor(self->tensor),
      paddle::platform::errors::Fatal("Only leaf Tensor can be set grad."));

  paddle::experimental::Tensor* grad =
      egr::EagerUtils::mutable_grad(self->tensor);
  PADDLE_ENFORCE(grad != nullptr,
                 paddle::platform::errors::Fatal(
                     "Detected NULL grad. "
                     "Please check if you have manually cleared "
                     "the grad inside autograd_meta."));
  grad->copy_(src, self->tensor.place(), true);
  return 0;
  EAGER_CATCH_AND_THROW_RETURN_NEG
}

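// Setter for Tensor.stop_gradient; also makes sure the tensor owns an
// accumulation grad node.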
int tensor_properties_set_stop_gradient(TensorObject* self, PyObject* value,
                                        void* closure) {
  EAGER_TRY
  auto meta = egr::EagerUtils::autograd_meta(&self->tensor);
  meta->SetStopGradient(CastPyArg2AttrBoolean(value, 0));
  if (!meta->GradNode()) {
    meta->SetGradNode(std::make_shared<egr::GradNodeAccumulation>(meta));
  }
  return 0;
  EAGER_CATCH_AND_THROW_RETURN_NEG
}

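// Getter for Tensor.persistable.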
PyObject* tensor_properties_get_persistable(TensorObject* self, void* closure) {
  EAGER_TRY
  auto meta = egr::EagerUtils::autograd_meta(&self->tensor);
  return ToPyObject(meta->Persistable());
  EAGER_CATCH_AND_THROW_RETURN_NULL
}

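// Setter for Tensor.persistable.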
int tensor_properties_set_persistable(TensorObject* self, PyObject* value,
                                      void* closure) {
  EAGER_TRY
  auto meta = egr::EagerUtils::autograd_meta(&self->tensor);
  meta->SetPersistable(CastPyArg2AttrBoolean(value, 0));
  return 0;
  EAGER_CATCH_AND_THROW_RETURN_NEG
}

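// Getter for Tensor.shape: returns the dims as a list of ints (an empty
// list for an undefined tensor).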
PyObject* tensor_properties_get_shape(TensorObject* self, void* closure) {
  EAGER_TRY
  std::vector<int64_t> value;
  if (!self->tensor.defined()) {
    return ToPyObject(value);
  }
  auto ddim = self->tensor.shape();
  size_t rank = static_cast<size_t>(ddim.size());
  value.resize(rank);
  for (size_t i = 0; i < rank; i++) {
    value[i] = ddim[i];
  }

  return ToPyObject(value);
  EAGER_CATCH_AND_THROW_RETURN_NULL
}

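// Getter for Tensor.place: the device on which the tensor resides.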
PyObject* tensor_properties_get_place(TensorObject* self, void* closure) {
  EAGER_TRY
  return ToPyObject(self->tensor.place());
  EAGER_CATCH_AND_THROW_RETURN_NULL
}

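// Getter for Tensor._place_str: the place rendered as a string, mainly for
// debugging and printing.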
PyObject* tensor_properties_get_place_str(TensorObject* self, void* closure) {
  EAGER_TRY
  std::stringstream ostr;
  ostr << self->tensor.place();
  return ToPyObject(ostr.str());
  EAGER_CATCH_AND_THROW_RETURN_NULL
}

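// Getter for Tensor.dtype: converts the phi data type to the legacy VarType
// enum; an undefined tensor reports FP32, matching old dygraph.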
PyObject* tensor_properties_get_dtype(TensorObject* self, void* closure) {
  EAGER_TRY
  if (!self->tensor.defined()) {
    // keep the same behavior as the old dygraph mode
    return ToPyObject(framework::proto::VarType::FP32);
  }
  return ToPyObject(
      paddle::framework::TransToProtoVarType(self->tensor.type()));
  EAGER_CATCH_AND_THROW_RETURN_NULL
}

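// Getter/setter table exposed on the Python-side eager Tensor type.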
struct PyGetSetDef variable_properties[] = {
    {"grad", (getter)tensor_properties_get_grad,
     (setter)tensor_properties_set_grad, nullptr, nullptr},
    {"name", (getter)tensor_properties_get_name,
     (setter)tensor_properties_set_name, nullptr, nullptr},
    {"stop_gradient", (getter)tensor_properties_get_stop_gradient,
     (setter)tensor_properties_set_stop_gradient, nullptr, nullptr},
    {"persistable", (getter)tensor_properties_get_persistable,
     (setter)tensor_properties_set_persistable, nullptr, nullptr},
    {"shape", (getter)tensor_properties_get_shape, nullptr, nullptr, nullptr},
    // {"is_leaf", (getter)tensor_properties_get_is_leaf, nullptr,
    // nullptr,
    //  nullptr},
    {"place", (getter)tensor_properties_get_place, nullptr, nullptr, nullptr},
    {"_place_str", (getter)tensor_properties_get_place_str, nullptr, nullptr,
     nullptr},
    {"dtype", (getter)tensor_properties_get_dtype, nullptr, nullptr, nullptr},
    {"type", (getter)tensor_properties_get_type, nullptr, nullptr, nullptr},
    {"is_leaf", (getter)tensor_properties_is_leaf, nullptr, nullptr, nullptr},
    {nullptr, nullptr, nullptr, nullptr, nullptr}};

}  // namespace pybind
}  // namespace paddle