From 1bdb7960ba9ec8d496e35bf3318dddc3430de173 Mon Sep 17 00:00:00 2001
From: wangzhen38 <41941775+wangzhen38@users.noreply.github.com>
Date: Wed, 11 Jan 2023 14:11:02 +0800
Subject: [PATCH] [rm fluid] dgc_optimizer (#49714)

---
 .../distributed/fleet/meta_optimizers/dgc_optimizer.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/python/paddle/distributed/fleet/meta_optimizers/dgc_optimizer.py b/python/paddle/distributed/fleet/meta_optimizers/dgc_optimizer.py
index 5c97fe90a2e..96b3802a794 100644
--- a/python/paddle/distributed/fleet/meta_optimizers/dgc_optimizer.py
+++ b/python/paddle/distributed/fleet/meta_optimizers/dgc_optimizer.py
@@ -21,9 +21,8 @@ __all__ = []
 import paddle
 from paddle.common_ops_import import LayerHelper
 from paddle.fluid.dygraph import base as imperative_base
-from paddle.fluid.framework import in_dygraph_mode
 from paddle.fluid.optimizer import Momentum, Optimizer
-from paddle.framework import core
+from paddle.framework import core, in_dygraph_mode
 from paddle.nn.clip import ClipGradByNorm, append_gradient_clip_ops
 from paddle.static import create_global_var
 
@@ -101,7 +100,7 @@ class DGCMomentumOptimizer(Optimizer):
         if regularization is not None:
             regular_coeff = regularization._regularization_coeff
 
-            from paddle.fluid.regularizer import L1Decay, L2Decay
+            from paddle.regularizer import L1Decay, L2Decay
 
             if isinstance(regularization, L1Decay):
                 regular_type = 1
@@ -123,7 +122,7 @@ class DGCMomentumOptimizer(Optimizer):
         return True
 
     def _append_optimize_op(self, block, param_and_grad):
-        assert isinstance(block, paddle.fluid.framework.Block)
+        assert isinstance(block, paddle.framework.Block)
         velocity_acc = self._get_accumulator(
             self._u_velocity_acc_str, param_and_grad[0]
         )
-- 
GitLab
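
Note (not part of the patch): a minimal smoke test sketching that the non-fluid import paths introduced above resolve under Paddle 2.x. The coefficient check mirrors the `regularization._regularization_coeff` access in the patched code and assumes that private attribute name is unchanged by the migration.

    # Illustrative smoke test for the import migration; assumes Paddle 2.x,
    # where these symbols live outside the legacy paddle.fluid namespace.
    import paddle
    from paddle.framework import core, in_dygraph_mode
    from paddle.regularizer import L1Decay, L2Decay

    # in_dygraph_mode() is now exported from paddle.framework; dynamic
    # (eager) graph mode is the default in Paddle 2.x, so this prints True.
    print(in_dygraph_mode())

    # The regularizer classes moved from paddle.fluid.regularizer to
    # paddle.regularizer; DGCMomentumOptimizer reads the private
    # _regularization_coeff attribute, assumed here to still exist.
    reg = L2Decay(coeff=1e-4)
    print(isinstance(reg, L2Decay), reg._regularization_coeff)  # True 0.0001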