# Optimizer / learning-rate schedule for a 120-epoch training run
# (PaddleDetection-style config; `!PiecewiseDecay` is a tag the consuming
# framework's YAML loader resolves to a scheduler class).

epoch: 120

LearningRate:
  base_lr: 0.001
  schedulers:
  - !PiecewiseDecay
    # LR is multiplied by the matching gamma entry at each milestone epoch.
    milestones: [40, 60, 80, 100]
    gamma: [0.5, 0.5, 0.4, 0.1]
    use_warmup: false

OptimizerBuilder:
  optimizer:
    momentum: 0.0
    type: RMSProp
  regularizer:
    # L2 weight-decay coefficient.
    factor: 0.00005
    type: L2