From 2867a34631a245afdc0178032dadb96cbb00c0d9 Mon Sep 17 00:00:00 2001
From: Bubbliiiing <47347516+bubbliiiing@users.noreply.github.com>
Date: Mon, 7 Sep 2020 13:30:16 +0800
Subject: [PATCH] Update train.py

---
 train.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/train.py b/train.py
index a7a6b76..a5a8885 100644
--- a/train.py
+++ b/train.py
@@ -84,7 +84,7 @@ if __name__ == "__main__":
         param.requires_grad = False
 
     optimizer = optim.Adam(net.parameters(), lr=lr)
-    lr_scheduler = optim.lr_scheduler.StepLR(optimizer,step_size=1,gamma=0.9)
+    lr_scheduler = optim.lr_scheduler.StepLR(optimizer,step_size=1,gamma=0.95)
     for epoch in range(Start_iter,Freeze_epoch):
         with tqdm(total=epoch_size,desc=f'Epoch {epoch + 1}/{Freeze_epoch}',postfix=dict,mininterval=0.3) as pbar:
             loc_loss = 0
@@ -131,7 +131,7 @@ if __name__ == "__main__":
         param.requires_grad = True
 
     optimizer = optim.Adam(net.parameters(), lr=freeze_lr)
-    lr_scheduler = optim.lr_scheduler.StepLR(optimizer,step_size=1,gamma=0.9)
+    lr_scheduler = optim.lr_scheduler.StepLR(optimizer,step_size=1,gamma=0.95)
     for epoch in range(Freeze_epoch,Epoch):
         with tqdm(total=epoch_size,desc=f'Epoch {epoch + 1}/{Freeze_epoch}',postfix=dict,mininterval=0.3) as pbar:
             loc_loss = 0
-- 
GitLab
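
For reference, the patch only changes the decay factor of the StepLR schedule in both training phases: with step_size=1 the learning rate is multiplied by gamma after every epoch, so moving gamma from 0.9 to 0.95 gives a gentler decay (after 5 epochs the LR is ~0.59x the initial value with gamma=0.9 versus ~0.77x with gamma=0.95). Below is a minimal, self-contained sketch of that behaviour; the single dummy parameter stands in for net.parameters() and is not part of the patched train.py.

import torch
import torch.optim as optim

# Dummy parameter standing in for net.parameters() (illustration only).
param = torch.nn.Parameter(torch.zeros(1))

optimizer = optim.Adam([param], lr=1e-3)
# step_size=1: multiply the learning rate by gamma once per epoch.
lr_scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=1, gamma=0.95)

for epoch in range(5):
    # ... one epoch of training would run here ...
    optimizer.step()      # step the optimizer before the scheduler
    lr_scheduler.step()
    print(epoch + 1, optimizer.param_groups[0]['lr'])

# gamma=0.95 prints 1e-3 * 0.95**n after epoch n; the old gamma=0.9
# would reach the same LR values roughly twice as fast.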