optimizer_30e.yml
epoch: 30

LearningRate:
  base_lr: 0.0001
  schedulers:
  - !PiecewiseDecay
    gamma: 0.1
    milestones: [20,]
    use_warmup: False

OptimizerBuilder:
  optimizer:
    type: Adam
  regularizer: NULL
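
The schedule above trains for 30 epochs with Adam at a base learning rate of 1e-4; the PiecewiseDecay scheduler multiplies that rate by gamma=0.1 at epoch 20 (giving 1e-5), warmup is disabled, and no weight-decay regularizer is applied.

In PaddleDetection-style repos, an optimizer fragment like this is normally not used on its own but pulled into a top-level model config through its _BASE_ include list, where the including file can also override individual fields. The following is only a minimal sketch of that pattern: the top-level file name, the dataset and model config names, and the relative paths are hypothetical placeholders, not part of this repository's actual layout.

# hypothetical top-level config, e.g. my_model_30e_coco.yml
_BASE_: [
  '../datasets/coco_detection.yml',   # hypothetical dataset config
  '_base_/optimizer_30e.yml',         # this file
  '_base_/my_model.yml',              # hypothetical model definition
]

# fields from included fragments can be overridden here,
# e.g. lowering the base learning rate for fine-tuning:
LearningRate:
  base_lr: 0.00005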