// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <functional>
#include <memory>
#include <string>
#include <vector>

#include "paddle/fluid/eager/autograd_meta.h"
#include "paddle/fluid/eager/grad_node_info.h"
#include "paddle/fluid/eager/hooks.h"

namespace egr {

class GradNodeAccumulation : public GradNodeBase {
 public:
  // Constructor: configure fwd input tensors to grad node
  explicit GradNodeAccumulation(AutogradMeta* meta) : GradNodeBase(1, 1) {
    weak_grad_ = meta->WeakGrad();
    SetDefaultGradInOutMeta();
  }

  ~GradNodeAccumulation() override = default;

  // Functor: perform backward computations
  virtual std::vector<std::vector<paddle::experimental::Tensor>> operator()(
      const std::vector<std::vector<paddle::experimental::Tensor>>& grads)
      override;

  std::string name() { return "GradNodeAccumulation"; }

  /**
   * Register ReduceHook
   * **/
  void RegisterReduceHook(std::shared_ptr<TensorVoidHook>&& hook);

  /**
   * Apply ReduceHook here
   * **/
  inline bool ReduceHooksRegistered() { return reduce_hooks_.size() != 0; }
  void ApplyReduceHooks();

 private:
  // Weak reference to the gradient tensor held by the leaf tensor's
  // AutogradMeta; accumulation writes into it only if it is still alive.
  std::weak_ptr<paddle::experimental::Tensor> weak_grad_;

  std::function<paddle::experimental::Tensor(
      const paddle::experimental::Tensor&)>
      retain_grad_hook_;

  // Hooks run after gradient accumulation, e.g. for gradient reduction
  // in distributed training.
  std::vector<std::shared_ptr<TensorVoidHook>> reduce_hooks_;
};

}  // namespace egr
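
// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original header). It assumes
// "paddle/fluid/eager/hooks.h" defines a TensorVoidHook base class with a
// virtual `void operator()()`; the concrete hook type `AllReduceGradHook`
// and the surrounding call sites below are hypothetical and shown only to
// clarify how RegisterReduceHook / ApplyReduceHooks fit together.
//
//   struct AllReduceGradHook : public egr::TensorVoidHook {
//     void operator()() override {
//       // e.g. all-reduce the accumulated gradient across devices
//     }
//   };
//
//   // After the accumulation node has been created for a leaf tensor:
//   auto node = std::make_shared<egr::GradNodeAccumulation>(autograd_meta);
//   node->RegisterReduceHook(std::make_shared<AllReduceGradHook>());
//
//   // Later, once gradients have been accumulated in the backward pass:
//   if (node->ReduceHooksRegistered()) {
//     node->ApplyReduceHooks();  // runs every registered reduce hook once
//   }
// ---------------------------------------------------------------------------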