Unverified commit 964211a8, authored by HuangLiangJie, committed by GitHub

Change optimizer for vits, test=tts (#2791)

Parent 96d76c83
...
@@ -24,7 +24,7 @@ import yaml
 from paddle import DataParallel
 from paddle import distributed as dist
 from paddle.io import DataLoader
-from paddle.optimizer import Adam
+from paddle.optimizer import AdamW
 from yacs.config import CfgNode
 from paddlespeech.t2s.datasets.am_batch_fn import vits_multi_spk_batch_fn
...
@@ -164,14 +164,14 @@ def train_sp(args, config):
     lr_schedule_g = scheduler_classes[config["generator_scheduler"]](
         **config["generator_scheduler_params"])
-    optimizer_g = Adam(
+    optimizer_g = AdamW(
         learning_rate=lr_schedule_g,
         parameters=gen_parameters,
         **config["generator_optimizer_params"])
     lr_schedule_d = scheduler_classes[config["discriminator_scheduler"]](
         **config["discriminator_scheduler_params"])
-    optimizer_d = Adam(
+    optimizer_d = AdamW(
         learning_rate=lr_schedule_d,
         parameters=dis_parameters,
         **config["discriminator_optimizer_params"])
...
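For context, the commit swaps `paddle.optimizer.Adam` for `paddle.optimizer.AdamW` while keeping the same construction pattern: build an LR scheduler first, then pass it to the optimizer as its learning rate. Below is a minimal, self-contained sketch of that pattern; the `Linear` layer and all hyperparameter values are illustrative placeholders, not values taken from the VITS config or from this PR.

```python
# Minimal sketch of the post-commit pattern: a scheduler is created
# first, then handed to AdamW as its learning rate, mirroring the
# lr_schedule_g / optimizer_g code in train_sp(). The layer and the
# hyperparameters below are placeholders, not the VITS defaults.
import paddle
from paddle.optimizer import AdamW
from paddle.optimizer.lr import ExponentialDecay

model = paddle.nn.Linear(4, 4)  # stand-in for the generator/discriminator

lr_schedule = ExponentialDecay(learning_rate=2.0e-4, gamma=0.999875)
optimizer = AdamW(
    learning_rate=lr_schedule,
    parameters=model.parameters(),
    beta1=0.8,
    beta2=0.99,
    epsilon=1.0e-9,
    weight_decay=0.01)  # decoupled decay is what distinguishes AdamW from Adam

# One training step to show the pieces working together.
loss = model(paddle.randn([2, 4])).mean()
loss.backward()
optimizer.step()
optimizer.clear_grad()
lr_schedule.step()  # advance the schedule (per-iteration here)
```

The practical difference is that AdamW applies weight decay directly to the parameters at update time instead of folding it into the gradient as L2 regularization, which is why the optimizer class can be swapped without touching the scheduler setup.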