// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include "paddle/fluid/eager/autograd_meta.h"
#include "paddle/fluid/eager/grad_node_info.h"
#include "paddle/fluid/eager/hooks.h"

namespace egr {

class GradNodeAccumulation : public GradNodeBase {
 public:
  // Constructor: configure the grad node from the forward tensor's
  // AutogradMeta
  explicit GradNodeAccumulation(AutogradMeta* meta) : GradNodeBase(1, 1) {
    VLOG(6) << "Construct GradNodeAccumulation";
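    // Keep only a weak reference to the leaf tensor's grad so this node does
    // not extend the grad tensor's lifetime, then install the default
    // single-slot in/out metadata declared by GradNodeBase(1, 1) above.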
    weak_grad_ = meta->WeakGrad();
    SetDefaultGradInOutMeta();
  }

  ~GradNodeAccumulation() override {
    VLOG(6) << "Destruct GradNodeAccumulation";
  }

  // Functor: perform backward computations
  std::vector<std::vector<paddle::experimental::Tensor>> operator()(
      const std::vector<std::vector<paddle::experimental::Tensor>>& grads,
      bool create_graph = false) override;
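  // Note: `grads` uses the slot-major layout shared by all grad nodes
  // (grads[slot][rank]). This node is expected to accumulate grads[0][0]
  // into the tensor referenced by weak_grad_ and return the result in the
  // same layout; see the accompanying .cc for the exact behavior.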

  void ClearTensorWrappers() override { VLOG(6) << "Do nothing here now"; }

  bool IsTensorWrappersCleared() override {
    VLOG(6) << "Do nothing here now";
    return false;
  }

  std::string name() { return "GradNodeAccumulation"; }

  /**
   * Register ReduceHook
   */
  void RegisterReduceHook(std::shared_ptr<TensorVoidHook>&& hook);

  /**
   * Query and apply registered ReduceHooks
   */
  inline bool ReduceHooksRegistered() { return !reduce_hooks_.empty(); }
  void ApplyReduceHooks();
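  // Reduce hooks take no arguments and return nothing (TensorVoidHook); they
  // are presumably fired once this leaf's gradient is fully accumulated, e.g.
  // to trigger a gradient allreduce in distributed training.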

 private:
  std::weak_ptr<paddle::experimental::Tensor> weak_grad_;

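  // Hook applied to the incoming gradient before it is written back to the
  // leaf tensor; judging by its name, this backs retain_grad-style capture
  // of the accumulated gradient.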
  std::function<paddle::experimental::Tensor(
      const paddle::experimental::Tensor&)>
      retain_grad_hook_;

  std::vector<std::shared_ptr<TensorVoidHook>> reduce_hooks_;
};

}  // namespace egr
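
// ---------------------------------------------------------------------------
// Illustrative usage sketch (comments only, not a tested snippet). It assumes
// that EagerUtils::autograd_meta() from paddle/fluid/eager/utils.h is the way
// to reach a tensor's AutogradMeta, and that CppTensorVoidHook from
// paddle/fluid/eager/hooks.h wraps a std::function<void()>:
//
//   paddle::experimental::Tensor leaf = /* some leaf tensor */;
//   egr::AutogradMeta* meta = egr::EagerUtils::autograd_meta(&leaf);
//   auto node = std::make_shared<egr::GradNodeAccumulation>(meta);
//
//   // Optional: run a callback once the gradient has been accumulated.
//   node->RegisterReduceHook(std::make_shared<egr::CppTensorVoidHook>(
//       []() { VLOG(6) << "grad accumulated"; }));
//
//   // During backward, the engine feeds the incoming grad in slot-major form.
//   auto out = (*node)({{incoming_grad}});
// ---------------------------------------------------------------------------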