# Optimizer and learning-rate schedule for a 120-epoch training run.
---
epoch: 120  # total number of training epochs

LearningRate:
  # Starting learning rate; stepped down at the milestones below.
  base_lr: 0.001
  schedulers:
  - !PiecewiseDecay
    # Epochs at which the LR switches to the next entry of `values`.
    milestones: [40, 60, 80, 100]
    # len(values) == len(milestones) + 1 — presumably one LR per
    # interval (first entry applies before the first milestone);
    # verify against the consumer's PiecewiseDecay implementation.
    values: [0.001, 0.0005, 0.00025, 0.0001, 0.00001]
    use_warmup: false
OptimizerBuilder:
  # SGD with momentum.
  optimizer:
    type: Momentum
    momentum: 0.9
  # L2 weight decay.
  regularizer:
    type: L2
    factor: 0.0005