---
# Optimizer and learning-rate schedule for a 120-epoch training run
# (PaddleDetection-style config; `!PiecewiseDecay` is a tag the consuming
# framework registers — do not remove it).
epoch: 120

LearningRate:
  # Starting learning rate; matches the first entry of `values` below.
  base_lr: 0.001
  schedulers:
  - !PiecewiseDecay
    # Decay boundaries (in epochs). `values` holds one LR per interval —
    # 5 values for the 5 intervals delimited by 4 milestones; presumably the
    # scheduler switches LR at each boundary — confirm against the framework.
    milestones: [40, 60, 80, 100]
    values: [0.001, 0.0005, 0.00025, 0.0001, 0.00001]
    use_warmup: false

OptimizerBuilder:
  optimizer:
    momentum: 0.9
    type: Momentum
  regularizer:
    # L2 weight decay coefficient.
    factor: 0.0005
    type: L2