optimizer_60e.yml (296 bytes)
epoch: 60  # total number of training epochs

LearningRate:
  base_lr: 0.01
  schedulers:
  # Step decay: multiply the LR by gamma at each milestone epoch.
  - !PiecewiseDecay
    gamma: 0.1
    milestones: [30, 44]
    use_warmup: True
  # Exponential warmup over the first `steps` iterations.
  - !ExpWarmup
    steps: 1000
    power: 4

OptimizerBuilder:
  optimizer:
    momentum: 0.9
    type: Momentum     # SGD with momentum
  regularizer:
    factor: 0.0001     # L2 weight-decay coefficient
    type: L2
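
The schedule above decays the base LR of 0.01 by a factor of 0.1 at epochs 30 and 44 of the 60-epoch run, with an exponential warmup over the first 1000 iterations. The sketch below (plain Python, not part of the config) is one way to read the combined schedule; the warmup expression and the iters_per_epoch value are assumptions for illustration, since the exact formulas live in PaddleDetection's optimizer code and the steps per epoch depend on the dataset and batch size rather than on this file.

# Minimal sketch of the resulting learning-rate curve (assumed formulas).

BASE_LR = 0.01            # LearningRate.base_lr
GAMMA = 0.1               # PiecewiseDecay.gamma
MILESTONES = (30, 44)     # PiecewiseDecay.milestones (epochs)
WARMUP_STEPS = 1000       # ExpWarmup.steps (iterations)
WARMUP_POWER = 4          # ExpWarmup.power


def learning_rate(epoch: int, it: int, iters_per_epoch: int) -> float:
    """LR at iteration `it` of `epoch`; `iters_per_epoch` is an assumed
    value that depends on dataset size and batch size, not this config."""
    # Piecewise decay: scale by GAMMA once for every milestone already passed.
    lr = BASE_LR * GAMMA ** sum(epoch >= m for m in MILESTONES)

    # Exponential warmup over the first WARMUP_STEPS iterations
    # (assumed form: lr * (global_step / WARMUP_STEPS) ** WARMUP_POWER).
    global_step = epoch * iters_per_epoch + it
    if global_step < WARMUP_STEPS:
        lr *= (global_step / WARMUP_STEPS) ** WARMUP_POWER
    return lr


if __name__ == "__main__":
    for epoch in (0, 29, 30, 44, 59):
        print(epoch, learning_rate(epoch, it=0, iters_per_epoch=500))

Note that PaddleDetection model configs typically pull a file like this in through their _BASE_ list rather than using it standalone.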