From 3b8e36b8057ccbbc23a0212cb51cce758660f288 Mon Sep 17 00:00:00 2001
From: CC
Date: Fri, 14 Oct 2022 10:21:28 +0800
Subject: [PATCH] add new feature(gradclipnorm) for invdn (#705)

---
 configs/invdn_denoising.yaml |  3 ++-
 ppgan/models/invdn_model.py  | 25 +++++++++++++++++++++++++
 2 files changed, 27 insertions(+), 1 deletion(-)

diff --git a/configs/invdn_denoising.yaml b/configs/invdn_denoising.yaml
index cba100f..304c3ca 100644
--- a/configs/invdn_denoising.yaml
+++ b/configs/invdn_denoising.yaml
@@ -61,7 +61,8 @@ optimizer:
     - generator
   beta1: 0.9
   beta2: 0.99
-  epsilon: 1e-8 #TODO GRADIENT_CLIPPING
+  epsilon: 1e-8
+  clip_grad_norm: 10
 
 log_config:
   interval: 100
diff --git a/ppgan/models/invdn_model.py b/ppgan/models/invdn_model.py
index d5d6d02..8e70046 100644
--- a/ppgan/models/invdn_model.py
+++ b/ppgan/models/invdn_model.py
@@ -21,6 +21,7 @@ from .base_model import BaseModel
 from .generators.builder import build_generator
 from .criterions.builder import build_criterion
 from ppgan.utils.visual import tensor2img
+from ..solver import build_lr_scheduler, build_optimizer
 
 
 @MODELS.register()
@@ -71,6 +72,30 @@ class InvDNModel(BaseModel):
             optims['optim'].step()
         self.losses['loss'] = l_total.numpy()
 
+    def setup_optimizers(self, lr, cfg):
+        if cfg.get('name', None):
+            cfg_ = cfg.copy()
+            net_names = cfg_.pop('net_names')
+            parameters = []
+            for net_name in net_names:
+                parameters += self.nets[net_name].parameters()
+
+            cfg_['grad_clip'] = nn.ClipGradByNorm(cfg_['clip_grad_norm'])
+            cfg_.pop('clip_grad_norm')
+
+            self.optimizers['optim'] = build_optimizer(cfg_, lr, parameters)
+        else:
+            for opt_name, opt_cfg in cfg.items():
+                cfg_ = opt_cfg.copy()
+                net_names = cfg_.pop('net_names')
+                parameters = []
+                for net_name in net_names:
+                    parameters += self.nets[net_name].parameters()
+                self.optimizers[opt_name] = build_optimizer(
+                    cfg_, lr, parameters)
+
+        return self.optimizers
+
     def forward(self):
         pass
 
-- 
GitLab