Commit aa04e265 authored by Hui Zhang

rm useless comment

Parent 094d05f6
@@ -90,7 +90,7 @@ training:
   optim_conf:
     lr: 0.001
     weight_decay: 1e-6
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
......
@@ -80,7 +80,7 @@ training:
   optim_conf:
     lr: 0.002
     weight_decay: 1e-6
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
......
@@ -88,7 +88,7 @@ training:
   optim_conf:
     lr: 0.001
     weight_decay: 1e-6
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
......
@@ -83,7 +83,7 @@ training:
   optim_conf:
     lr: 0.002
     weight_decay: 1e-6
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
......
@@ -90,7 +90,7 @@ training:
   optim_conf:
     lr: 0.001
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
......
@@ -83,7 +83,7 @@ training:
   optim_conf:
     lr: 0.001
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
......
@@ -86,7 +86,7 @@ training:
   optim_conf:
     lr: 0.004
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
......
@@ -75,7 +75,7 @@ optim: adam
 optim_conf:
   global_grad_clip: 5.0
   weight_decay: 1.0e-06
-scheduler: warmuplr # pytorch v1.1.0+ required
+scheduler: warmuplr
 scheduler_conf:
   lr: 0.004
   warmup_steps: 25000
......
@@ -82,7 +82,7 @@ training:
   optim_conf:
     lr: 0.004
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
......
@@ -82,7 +82,7 @@ training:
   optim_conf:
     lr: 0.004
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
......
@@ -80,7 +80,7 @@ training:
   optim_conf:
     lr: 0.004
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 1200
     lr_decay: 1.0
......
@@ -90,7 +90,7 @@ training:
   optim_conf:
     lr: 0.001
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
......
@@ -83,7 +83,7 @@ training:
   optim_conf:
     lr: 0.002
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
......
@@ -86,7 +86,7 @@ training:
   optim_conf:
     lr: 0.002
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
......
@@ -80,7 +80,7 @@ training:
   optim_conf:
     lr: 0.002
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
......
@@ -87,7 +87,7 @@ training:
   optim_conf:
     lr: 0.001
     weight_decay: 1e-6
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 5000
     lr_decay: 1.0
......
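Every hunk above makes the same one-line change: the trailing `# pytorch v1.1.0+ required` comment is dropped from the `scheduler: warmuplr` entry of each example config, a note that is stale here. For reference, the sketch below shows what a warmup-LR schedule of this kind typically computes, assuming the common Noam-style formula; the function name `warmup_lr` and its defaults are illustrative, not this project's actual API. The `lr` and `warmup_steps` arguments mirror the config keys above; the `lr_decay: 1.0` key is left out of the sketch, since at 1.0 it would not change the rate.

def warmup_lr(step: int, lr: float = 0.001, warmup_steps: int = 25000) -> float:
    """Noam-style warmup: ramp up for `warmup_steps` steps, then decay as step**-0.5.

    Illustrative formula (an assumption, not necessarily this repo's code):
        lr * warmup_steps**0.5 * min(step**-0.5, step * warmup_steps**-1.5)
    The peak, reached at step == warmup_steps, equals the configured `lr`.
    """
    step = max(step, 1)  # guard against division by zero at step 0
    return lr * warmup_steps ** 0.5 * min(step ** -0.5, step * warmup_steps ** -1.5)

if __name__ == "__main__":
    # With lr=0.001 and warmup_steps=25000 (the most common pairing above),
    # the rate climbs linearly to 0.001 at step 25000, then decays slowly.
    for s in (1, 12500, 25000, 100000):
        print(f"step {s:>6}: lr = {warmup_lr(s):.8f}")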