From c7a1252f20f8c3d9979445f91dbe6706953efcee Mon Sep 17 00:00:00 2001
From: Yang Zhang
Date: Tue, 28 Apr 2020 12:45:20 +0800
Subject: [PATCH] Revert back to `fluid.clip` for gradient clipping (#568)

* Revert back to `fluid.clip` for gradient clipping

current API is in flux, wait for it to stabilize

* Please CI

still buggy
---
 configs/anchor_free/fcos_dcn_r50_fpn_1x.yml | 1 -
 ppdet/modeling/ops.py                       | 4 ++--
 ppdet/optimizer.py                          | 4 ++++
 tools/train.py                              | 6 +-----
 4 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/configs/anchor_free/fcos_dcn_r50_fpn_1x.yml b/configs/anchor_free/fcos_dcn_r50_fpn_1x.yml
index ff46e744e..7a4d770cd 100644
--- a/configs/anchor_free/fcos_dcn_r50_fpn_1x.yml
+++ b/configs/anchor_free/fcos_dcn_r50_fpn_1x.yml
@@ -180,4 +180,3 @@ TestReader:
     use_padded_im_info: true
   batch_size: 1
   shuffle: false
-
diff --git a/ppdet/modeling/ops.py b/ppdet/modeling/ops.py
index 1f758ec0d..f4787cf70 100644
--- a/ppdet/modeling/ops.py
+++ b/ppdet/modeling/ops.py
@@ -29,8 +29,8 @@ __all__ = [
     'AnchorGenerator', 'AnchorGrid', 'DropBlock', 'RPNTargetAssign',
     'GenerateProposals', 'MultiClassNMS', 'BBoxAssigner', 'MaskAssigner',
     'RoIAlign', 'RoIPool', 'MultiBoxHead', 'SSDLiteMultiBoxHead',
-    'SSDOutputDecoder', 'RetinaTargetAssign', 'RetinaOutputDecoder',
-    'ConvNorm', 'DeformConvNorm', 'MultiClassSoftNMS', 'LibraBBoxAssigner'
+    'SSDOutputDecoder', 'RetinaTargetAssign', 'RetinaOutputDecoder', 'ConvNorm',
+    'DeformConvNorm', 'MultiClassSoftNMS', 'LibraBBoxAssigner'
 ]
 
 
diff --git a/ppdet/optimizer.py b/ppdet/optimizer.py
index 21920bb66..5f7cfefab 100644
--- a/ppdet/optimizer.py
+++ b/ppdet/optimizer.py
@@ -197,6 +197,10 @@ class OptimizerBuilder():
         self.optimizer = optimizer
 
     def __call__(self, learning_rate):
+        if self.clip_grad_by_norm is not None:
+            fluid.clip.set_gradient_clip(
+                clip=fluid.clip.GradientClipByGlobalNorm(
+                    clip_norm=self.clip_grad_by_norm))
         if self.regularizer:
             reg_type = self.regularizer['type'] + 'Decay'
             reg_factor = self.regularizer['factor']
diff --git a/tools/train.py b/tools/train.py
index 477fe669e..e2d21cf80 100644
--- a/tools/train.py
+++ b/tools/train.py
@@ -126,11 +126,7 @@ def main():
             loss *= ctx.get_loss_scale_var()
         lr = lr_builder()
         optimizer = optim_builder(lr)
-        clip = None
-        if optim_builder.clip_grad_by_norm is not None:
-            clip = fluid.clip.GradientClipByGlobalNorm(
-                clip_norm=optim_builder.clip_grad_by_norm)
-        optimizer.minimize(loss, grad_clip=clip)
+        optimizer.minimize(loss)
 
         if FLAGS.fp16:
             loss /= ctx.get_loss_scale_var()
--
GitLab
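
For readers unfamiliar with the two styles being swapped, the sketch below shows how the reverted `fluid.clip` approach is wired up: the clip strategy is registered program-wide with `fluid.clip.set_gradient_clip()` before `minimize()` is called, rather than being passed to `minimize()` as a `grad_clip` argument. This is a minimal sketch assuming the PaddlePaddle 1.x static-graph (`fluid`) API that PaddleDetection used at the time; the toy regression network, the Momentum optimizer, and the clip_norm value of 35.0 are illustrative placeholders, not part of this patch.

import paddle.fluid as fluid

# Toy network standing in for the detector's loss (placeholder, not from this patch).
x = fluid.data(name='x', shape=[None, 13], dtype='float32')
y = fluid.data(name='y', shape=[None, 1], dtype='float32')
pred = fluid.layers.fc(input=x, size=1)
loss = fluid.layers.reduce_mean(
    fluid.layers.square_error_cost(input=pred, label=y))

# Reverted style: register clip-by-global-norm at the program level,
# before building the optimizer and calling minimize().
fluid.clip.set_gradient_clip(
    clip=fluid.clip.GradientClipByGlobalNorm(clip_norm=35.0))

optimizer = fluid.optimizer.Momentum(learning_rate=0.01, momentum=0.9)
optimizer.minimize(loss)  # no grad_clip argument passed here

With this style, `tools/train.py` no longer needs to know about clipping at all, which is why the patch moves the logic into `OptimizerBuilder.__call__` in `ppdet/optimizer.py`.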