diff --git a/examples/aishell/asr1/conf/chunk_conformer.yaml b/examples/aishell/asr1/conf/chunk_conformer.yaml
index e07cd07c5b99a5bb1ed166315df4a0df3350f882..80b455878f5f293234ac9130c8c22f863115d9ae 100644
--- a/examples/aishell/asr1/conf/chunk_conformer.yaml
+++ b/examples/aishell/asr1/conf/chunk_conformer.yaml
@@ -90,7 +90,7 @@ training:
   optim_conf:
     lr: 0.001
     weight_decay: 1e-6
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
diff --git a/examples/aishell/asr1/conf/transformer.yaml b/examples/aishell/asr1/conf/transformer.yaml
index d13f9e2f36d5ba1335749542073b21e184829030..60ec01801c634afa29a4cb0d88ec664323919500 100644
--- a/examples/aishell/asr1/conf/transformer.yaml
+++ b/examples/aishell/asr1/conf/transformer.yaml
@@ -80,7 +80,7 @@ training:
   optim_conf:
     lr: 0.002
     weight_decay: 1e-6
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
diff --git a/examples/callcenter/asr1/conf/chunk_conformer.yaml b/examples/callcenter/asr1/conf/chunk_conformer.yaml
index d20d2b9a6c191ef91481cd52c21fb622f2418b05..69959c68eeac1f998ce053b62f1fb6aed30dba9b 100644
--- a/examples/callcenter/asr1/conf/chunk_conformer.yaml
+++ b/examples/callcenter/asr1/conf/chunk_conformer.yaml
@@ -88,7 +88,7 @@ training:
   optim_conf:
     lr: 0.001
     weight_decay: 1e-6
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
diff --git a/examples/callcenter/asr1/conf/conformer.yaml b/examples/callcenter/asr1/conf/conformer.yaml
index f86cd4a368486d3989d57ce28c74caef645b0473..80c15abb1c30dc514a9a523bb248cf5f75665117 100644
--- a/examples/callcenter/asr1/conf/conformer.yaml
+++ b/examples/callcenter/asr1/conf/conformer.yaml
@@ -83,7 +83,7 @@ training:
   optim_conf:
     lr: 0.002
     weight_decay: 1e-6
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
diff --git a/examples/librispeech/asr1/conf/chunk_conformer.yaml b/examples/librispeech/asr1/conf/chunk_conformer.yaml
index 4a5741904b740940cef6fc98015e82c52601630f..545806640df48b94f40df4673b9b8d81a2b31b40 100644
--- a/examples/librispeech/asr1/conf/chunk_conformer.yaml
+++ b/examples/librispeech/asr1/conf/chunk_conformer.yaml
@@ -90,7 +90,7 @@ training:
   optim_conf:
     lr: 0.001
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
diff --git a/examples/librispeech/asr1/conf/chunk_transformer.yaml b/examples/librispeech/asr1/conf/chunk_transformer.yaml
index c2644daf4a76b671e72e8fbae5d8f0278fdc460a..70a9dc6afb88e6aa528560a46d19b0290d78a78f 100644
--- a/examples/librispeech/asr1/conf/chunk_transformer.yaml
+++ b/examples/librispeech/asr1/conf/chunk_transformer.yaml
@@ -83,7 +83,7 @@ training:
   optim_conf:
     lr: 0.001
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
diff --git a/examples/librispeech/asr1/conf/conformer.yaml b/examples/librispeech/asr1/conf/conformer.yaml
index 684b6297699051b217af15a018db362e4c401792..ca934eb1db28251debb57b0885d343598a528ae7 100644
--- a/examples/librispeech/asr1/conf/conformer.yaml
+++ b/examples/librispeech/asr1/conf/conformer.yaml
@@ -86,7 +86,7 @@ training:
   optim_conf:
     lr: 0.004
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
diff --git a/examples/librispeech/asr2/conf/transformer.yaml b/examples/librispeech/asr2/conf/transformer.yaml
index 3e9350abc0f118e152132e8c45e579fb29cbdc8a..00240743e2404aaad9bd56c32b8e1046fde82fa5 100644
--- a/examples/librispeech/asr2/conf/transformer.yaml
+++ b/examples/librispeech/asr2/conf/transformer.yaml
@@ -75,7 +75,7 @@ optim: adam
 optim_conf:
   global_grad_clip: 5.0
   weight_decay: 1.0e-06
-scheduler: warmuplr # pytorch v1.1.0+ required
+scheduler: warmuplr
 scheduler_conf:
   lr: 0.004
   warmup_steps: 25000
diff --git a/examples/ted_en_zh/st0/conf/transformer.yaml b/examples/ted_en_zh/st0/conf/transformer.yaml
index 5a05fa46ea598c89b4ca80e5384364e069c1cba3..6ed75be4e08849f86c9522b190a75937d2bccfdb 100644
--- a/examples/ted_en_zh/st0/conf/transformer.yaml
+++ b/examples/ted_en_zh/st0/conf/transformer.yaml
@@ -82,7 +82,7 @@ training:
   optim_conf:
     lr: 0.004
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
diff --git a/examples/ted_en_zh/st1/conf/transformer.yaml b/examples/ted_en_zh/st1/conf/transformer.yaml
index d553bde77f732195f86d26c4b6352273a28133ca..3bef7bc5fedbda3d9d7c159f2938c3c051d03cfc 100644
--- a/examples/ted_en_zh/st1/conf/transformer.yaml
+++ b/examples/ted_en_zh/st1/conf/transformer.yaml
@@ -82,7 +82,7 @@ training:
   optim_conf:
     lr: 0.004
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
diff --git a/examples/timit/asr1/conf/transformer.yaml b/examples/timit/asr1/conf/transformer.yaml
index 89ae2fd3de5595db93056cfb9c5c391afbc1a09b..af05a6cea54e4c81e235d80588740f6d38ef9f2e 100644
--- a/examples/timit/asr1/conf/transformer.yaml
+++ b/examples/timit/asr1/conf/transformer.yaml
@@ -80,7 +80,7 @@ training:
   optim_conf:
     lr: 0.004
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 1200
     lr_decay: 1.0
diff --git a/examples/tiny/asr1/conf/chunk_confermer.yaml b/examples/tiny/asr1/conf/chunk_confermer.yaml
index 728a82e3c8377b4a401dc5f4eb89820faa988df3..76b97adf8e2d00cbc56568b735279219b8ba24e1 100644
--- a/examples/tiny/asr1/conf/chunk_confermer.yaml
+++ b/examples/tiny/asr1/conf/chunk_confermer.yaml
@@ -90,7 +90,7 @@ training:
   optim_conf:
     lr: 0.001
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
diff --git a/examples/tiny/asr1/conf/chunk_transformer.yaml b/examples/tiny/asr1/conf/chunk_transformer.yaml
index 7c927122b30ad3728d19bbc158c2d09c0d6d3873..5f1991f952fe385eb008d9d52e03a98af5cab5b9 100644
--- a/examples/tiny/asr1/conf/chunk_transformer.yaml
+++ b/examples/tiny/asr1/conf/chunk_transformer.yaml
@@ -83,7 +83,7 @@ training:
   optim_conf:
     lr: 0.002
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
diff --git a/examples/tiny/asr1/conf/conformer.yaml b/examples/tiny/asr1/conf/conformer.yaml
index 21cc112862438b46477895b4ad98e58ef075bdd8..b2937c1bdee408189c7d5f580a469d408aa2fb9c 100644
--- a/examples/tiny/asr1/conf/conformer.yaml
+++ b/examples/tiny/asr1/conf/conformer.yaml
@@ -86,7 +86,7 @@ training:
   optim_conf:
     lr: 0.002
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
diff --git a/examples/tiny/asr1/conf/transformer.yaml b/examples/tiny/asr1/conf/transformer.yaml
index f4645c681b8ba637ee9a9ca738d570321d81173e..f53197561f6a7d38c96e406193d5b59ee724dad5 100644
--- a/examples/tiny/asr1/conf/transformer.yaml
+++ b/examples/tiny/asr1/conf/transformer.yaml
@@ -80,7 +80,7 @@ training:
   optim_conf:
     lr: 0.002
     weight_decay: 1e-06
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 25000
     lr_decay: 1.0
diff --git a/examples/wenetspeech/asr1/conf/conformer.yaml b/examples/wenetspeech/asr1/conf/conformer.yaml
index a3a42ec63f6d95738a2cfb890999a6677856fd1a..fc040a795200bf181e266b6367d4cd250ae8e466 100644
--- a/examples/wenetspeech/asr1/conf/conformer.yaml
+++ b/examples/wenetspeech/asr1/conf/conformer.yaml
@@ -87,7 +87,7 @@ training:
   optim_conf:
     lr: 0.001
     weight_decay: 1e-6
-  scheduler: warmuplr # pytorch v1.1.0+ required
+  scheduler: warmuplr
   scheduler_conf:
     warmup_steps: 5000
     lr_decay: 1.0
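
Note (not part of the patch above): the removed "# pytorch v1.1.0+ required" comment is a leftover from the upstream WeNet/ESPnet configs and does not apply to these PaddlePaddle-based recipes; the scheduler: warmuplr setting itself is unchanged. As a rough illustration only, the sketch below plots a few points of the learning-rate curve such a warmup scheduler typically produces, assuming warmuplr follows the standard Noam-style formula base_lr * warmup_steps^0.5 * min(step^-0.5, step * warmup_steps^-1.5); the function name warmup_lr and the chosen values (base_lr 0.002, warmup_steps 25000, as in most of these configs) are illustrative assumptions, not code from this repo.

# Hypothetical sketch: Noam-style warmup LR curve for the settings used above.
def warmup_lr(step, base_lr=0.002, warmup_steps=25000):
    """LR at a given training step: linear warmup to base_lr, then step**-0.5 decay."""
    step = max(step, 1)  # guard against step 0
    return base_lr * warmup_steps**0.5 * min(step**-0.5, step * warmup_steps**-1.5)

for s in (100, 12500, 25000, 100000):
    # LR rises linearly until warmup_steps (peaking at base_lr), then decays as step**-0.5.
    print(s, round(warmup_lr(s), 6))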