// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <memory>
#include <string>
#include <utility>

#include "paddle/fluid/framework/variable.h"
#include "paddle/fluid/imperative/hooks.h"

namespace paddle {
namespace imperative {

class InteriorVarHookPipeline;
class LeafVarHookPipeline;
class VarBase;
class GradOpNode;

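// VariableWrapper is the internal holder shared behind VarBase: it owns the
// underlying framework::Variable together with its metadata (name, type,
// data type, persistable and stop_gradient flags) and keeps weak links to
// the corresponding gradient var wrapper and grad op node.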
class VariableWrapper {
 public:
  friend class VarBase;

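  // A wrapper is created with a name only; the underlying Variable starts
  // uninitialized and is typically filled in through MutableVar().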
  explicit VariableWrapper(const std::string& name) : name_(name) {}

  const framework::Variable& Var() const { return var_; }

  framework::Variable* MutableVar() { return &var_; }

  // This is used for the Python API
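  // Note: the setting is forwarded to the grad var (if any), so stopping
  // gradient on a var also marks its gradient as stopped.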
  void SetOverridedStopGradient(bool stop_gradient) {
    overrided_stop_gradient_ = static_cast<int>(stop_gradient);

    if (auto grad_var = grad_var_.lock()) {
      grad_var->SetOverridedStopGradient(stop_gradient);
    }
  }

  // This is used for the Python API
  bool OverridedStopGradient() const { return overrided_stop_gradient_ != 0; }

  // This is used inside C++
  int InnerOverridedStopGradient() const { return overrided_stop_gradient_; }

  // This is used inside C++
  void InnerSetOverridedStopGradient(bool stop_gradient) {
    if (overrided_stop_gradient_ == -1) {
      overrided_stop_gradient_ = static_cast<int>(stop_gradient);
    } else {
      VLOG(6) << "Ignore Stop gradient conversion for Var: " << Name()
              << "Set value is: " << overrided_stop_gradient_;
    }

    if (auto grad_var = grad_var_.lock()) {
      grad_var->InnerSetOverridedStopGradient(stop_gradient);
    }
  }

  void SetPersistable(bool persistable) { persistable_ = persistable; }

  bool Persistable() const { return persistable_; }

  const std::string& Name() const { return name_; }

  void SetName(const std::string& name) { name_ = name; }

  void SetType(framework::proto::VarType::Type type) { type_ = type; }

  framework::proto::VarType::Type Type() const { return type_; }

  void SetDataType(framework::proto::VarType::Type data_type) {
    data_type_ = data_type;
  }

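  // Accessors for the gradient var and the grad op node. Both are held via
  // weak_ptr, so the shared_ptr-returning getters yield nullptr once the
  // peer object has been released.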
  std::shared_ptr<VariableWrapper> GetGradVar() const {
    return grad_var_.lock();
  }

  const std::weak_ptr<VariableWrapper>& GetWeakGradVar() const {
    return grad_var_;
  }

  std::shared_ptr<GradOpNode> GetGradNode() const { return grad_node_.lock(); }

  bool HasGradNode() const { return !grad_node_.expired(); }

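  // Returns the data type of the underlying tensor if both the variable and
  // the tensor are initialized; otherwise falls back to the recorded
  // data_type_.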
  framework::proto::VarType::Type DataType() const {
    const framework::Tensor* tensor = nullptr;
    if (var_.IsInitialized()) {
      if (type_ == framework::proto::VarType::LOD_TENSOR) {
        tensor = &(var_.Get<framework::LoDTensor>());
      } else if (type_ == framework::proto::VarType::SELECTED_ROWS) {
        tensor = &(var_.Get<framework::SelectedRows>().value());
      } else {
        VLOG(6) << "Variable " << name_ << " is not initialized";
        return data_type_;
      }
    }
    if (tensor && tensor->IsInitialized()) {
      return tensor->type();
    } else {
      VLOG(6) << "The tensor of variable " << name_ << " is not initialized";
      return data_type_;
    }
  }

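  // Returns the place of the underlying tensor if both the variable and the
  // tensor are initialized; otherwise falls back to CPUPlace.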
  platform::Place Place() const {
    const framework::Tensor* tensor = nullptr;
    auto place =
        platform::CPUPlace();  // Default place for var not initialized.
    if (var_.IsInitialized()) {
      if (type_ == framework::proto::VarType::LOD_TENSOR) {
        tensor = &(var_.Get<framework::LoDTensor>());
      } else if (type_ == framework::proto::VarType::SELECTED_ROWS) {
        tensor = &(var_.Get<framework::SelectedRows>().value());
      } else {
        VLOG(6) << "Variable " << name_ << " is not initialized";
        return place;
      }
    }
    if (tensor && tensor->IsInitialized()) {
      return tensor->place();
    } else {
      VLOG(6) << "The tensor of variable " << name_ << " is not initialized";
      return place;
    }
  }

  /* Hook related methods: can only be called by GradVarBase */
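  //
  // Judging from the checks in GetGradVarInteriorHooksSafely and
  // GetGradVarLeafHooksSafely below: interior hooks (OpBasePreHook) may only
  // be registered on a var that has a grad node, while leaf hooks
  // (GradAccumulatorPostHook) may only be registered on a var without one,
  // i.e. on a leaf of the backward graph. The hooks themselves are stored
  // on the associated grad var wrapper.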

  bool HasInteriorHooks() const { return interior_hooks_ != nullptr; }

  bool HasLeafHooks() const { return leaf_hooks_ != nullptr; }

  void AddGradVarInteriorHook(std::unique_ptr<OpBasePreHook>&& hook) {
    auto interior_hooks = GetGradVarInteriorHooksSafely();
    interior_hooks->add_hook(std::move(hook));
  }

  void AddGradVarLeafHook(std::unique_ptr<GradAccumulatorPostHook>&& hook) {
    auto leaf_hooks = GetGradVarLeafHooksSafely();
    leaf_hooks->add_hook(std::move(hook));
  }

  void AddGradVarLeafBackwardHook(
      std::unique_ptr<GradAccumulatorPostHook>&& hook) {
    auto leaf_hooks = GetGradVarLeafHooksSafely();
    leaf_hooks->add_backward_hook(std::move(hook));
  }

  const std::shared_ptr<InteriorVarHookPipeline>& GetInteriorHooks() const {
    return interior_hooks_;
  }

  std::shared_ptr<InteriorVarHookPipeline>& GetInteriorHooks() {
    return interior_hooks_;
  }

  const std::shared_ptr<LeafVarHookPipeline>& GetLeafHooks() const {
    return leaf_hooks_;
  }

  std::shared_ptr<LeafVarHookPipeline>& GetLeafHooks() { return leaf_hooks_; }

 private:
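  // These setters are reachable only through the friend class VarBase and
  // enforce write-once semantics: once set, the grad var cannot be replaced,
  // and the grad node may only be cleared, never swapped for another node.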
  void SetGradVar(const std::shared_ptr<VariableWrapper>& var) {
    auto shared_var = grad_var_.lock();
    if (shared_var != var) {
      PADDLE_ENFORCE_EQ(shared_var, nullptr,
                        platform::errors::PermissionDenied(
                            "Cannot set gradient var wrapper twice"));
      grad_var_ = var;
    }
  }

  void SetGradNode(const std::shared_ptr<GradOpNode>& grad_node) {
    if (!grad_node) {
      grad_node_.reset();
      return;
    }

    auto shared_node = grad_node_.lock();
    if (shared_node != grad_node) {
      PADDLE_ENFORCE_EQ(
          shared_node, nullptr,
          platform::errors::PermissionDenied("Cannot set gradient op twice"));
      grad_node_ = grad_node;
    }
  }

  /* Hook related private methods */
  std::shared_ptr<VariableWrapper> GetGradVarSafely() const {
    auto shared_grad_var = grad_var_.lock();
    PADDLE_ENFORCE_NOT_NULL(
        shared_grad_var,
        platform::errors::PermissionDenied(
            "Cannot add gradient hook on Tensor without gradient."));
    return shared_grad_var;
  }

  std::shared_ptr<InteriorVarHookPipeline>& GetGradVarInteriorHooksSafely() {
    auto shared_grad_var = GetGradVarSafely();
    PADDLE_ENFORCE_EQ(HasGradNode(), true,
                      platform::errors::PermissionDenied(
                          "Only interior Tensor in backward can register "
                          "interior gradient hook."));
    if (shared_grad_var->interior_hooks_ == nullptr) {
      shared_grad_var->interior_hooks_ =
          std::make_shared<InteriorVarHookPipeline>();
    }
    return shared_grad_var->interior_hooks_;
  }

  std::shared_ptr<LeafVarHookPipeline>& GetGradVarLeafHooksSafely() {
    auto shared_grad_var = GetGradVarSafely();
    PADDLE_ENFORCE_EQ(
        HasGradNode(), false,
        platform::errors::PermissionDenied(
            "Only leaf Tensor in backward can register leaf gradient hook."));
    if (shared_grad_var->leaf_hooks_ == nullptr) {
      shared_grad_var->leaf_hooks_ = std::make_shared<LeafVarHookPipeline>();
    }
    return shared_grad_var->leaf_hooks_;
  }

 private:
  framework::Variable var_;
  std::string name_;

  // Users may set stop_gradient explicitly, and that setting should override
  // the framework's default: -1 unset, 1 true, 0 false
  int overrided_stop_gradient_{-1};
  bool persistable_{false};

  framework::proto::VarType::Type type_{framework::proto::VarType::LOD_TENSOR};
  framework::proto::VarType::Type data_type_{framework::proto::VarType::FP32};

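  // Held weakly, presumably to avoid shared_ptr reference cycles between a
  // var, its grad var, and the grad op node that connects them.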
  std::weak_ptr<VariableWrapper> grad_var_;
  std::weak_ptr<GradOpNode> grad_node_;

  // NOTE: only the grad var can hold hooks now
  // Only an interior var can hold interior hooks
  std::shared_ptr<InteriorVarHookPipeline> interior_hooks_;
  // Only a leaf var can hold leaf hooks
  std::shared_ptr<LeafVarHookPipeline> leaf_hooks_;
};

}  // namespace imperative
}  // namespace paddle