epoch: 30  # employ iter to control schedule

LearningRate:
  base_lr: 0.02  # 0.02 for 8*(4+4) batch
  schedulers:
  - !PiecewiseDecay
    gamma: 0.1
    milestones: [300]  # do not decay lr
  - !LinearWarmup
    start_factor: 0.3333333333333333
    steps: 1000

max_iter: 90000  # 90k for 32 batch, 180k for 16 batch
epoch_iter: 1000  # set epoch_iter for saving checkpoint and eval

# update student params according to loss_grad every X iter.
optimize_rate: 1

SEMISUPNET:
  BBOX_THRESHOLD: 0.5  # not used
  TEACHER_UPDATE_ITER: 1
  BURN_UP_STEP: 9000
  EMA_KEEP_RATE: 0.9996
  # detailed weights for cls and loc task can be seen in cr_loss
  UNSUP_LOSS_WEIGHT: 1.0
  PSEUDO_WARM_UP_STEPS: 2000

OptimizerBuilder:
  optimizer:
    momentum: 0.9
    type: Momentum
  regularizer:
    factor: 0.0001
    type: L2