# Optimizer / LR-schedule configuration for a 120-epoch training run.
# Consumed by a config-driven trainer (PaddleDetection-style): `!PiecewiseDecay`
# is an application-defined YAML tag resolved by the framework's config loader.

# Total number of training epochs.
epoch: 120

LearningRate:
  # Initial learning rate before any scheduler is applied.
  base_lr: 0.001
  schedulers:
  - !PiecewiseDecay
    # Epochs at which the LR switches to the next entry in `values`.
    milestones: [40, 60, 80, 100]
    # NOTE(review): 5 values for 4 milestones — assumes the scheduler expects
    # len(values) == len(milestones) + 1 (one LR per interval) when warmup is
    # disabled; confirm against the PiecewiseDecay implementation.
    values: [0.001, 0.0005, 0.00025, 0.0001, 0.00001]
    use_warmup: false

OptimizerBuilder:
  optimizer:
    # Momentum of 0.0 disables the momentum term for RMSProp.
    momentum: 0.0
    type: RMSProp
  regularizer:
    # L2 weight-decay coefficient.
    factor: 0.00005
    type: L2