Commit 0f86c555 authored by gaotingquan, committed by Tingquan Gao

add amp args, use_amp=False
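
This commit adds a default AMP (automatic mixed precision) section to each of the training configs below. Mixed precision stays disabled by default (use_amp: False, use_fp16_test: False); the remaining keys set the initial loss scale (scale_loss: 128.0), turn on dynamic loss scaling, leave operator promotion off (use_promote: False), and select level O1 (mixed fp16; O2 would be pure fp16), so existing runs behave exactly as before until use_amp is flipped to True.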

Parent 2d8346cd
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: AlexNet
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CSPDarkNet53
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CSWinTransformer_base_224
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CSWinTransformer_base_384
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CSWinTransformer_large_224
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CSWinTransformer_large_384
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CSWinTransformer_small_224
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CSWinTransformer_tiny_224
......
......@@ -22,6 +22,18 @@ EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ConvNeXt_base_224
......
......@@ -22,6 +22,18 @@ EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ConvNeXt_base_384
......
......@@ -22,6 +22,18 @@ EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ConvNeXt_large_224
......
......@@ -22,6 +22,18 @@ EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ConvNeXt_large_384
......
......@@ -22,6 +22,18 @@ EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ConvNeXt_small
......
......@@ -22,6 +22,18 @@ EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ConvNeXt_tiny
......
......@@ -17,6 +17,18 @@ Global:
to_static: False
update_freq: 2 # for 8 cards
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CvT_13_224
......
......@@ -17,6 +17,18 @@ Global:
to_static: False
update_freq: 2 # for 8 cards
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CvT_13_384
......
......@@ -17,6 +17,18 @@ Global:
to_static: False
update_freq: 2 # for 8 cards
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CvT_21_224
......
......@@ -17,6 +17,18 @@ Global:
to_static: False
update_freq: 2 # for 8 cards
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CvT_21_384
......
......@@ -17,6 +17,18 @@ Global:
to_static: False
update_freq: 2 # for 8 cards
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CvT_W24_384
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA102
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA102x
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA102x2
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA169
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA34
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA46_c
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA46x_c
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA60
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA60x
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA60x_c
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DPN107
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DPN131
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DPN68
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DPN92
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DPN98
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DSNet_base
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DSNet_small
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DSNet_tiny
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DarkNet53
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_base_distilled_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 384, 384]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_base_distilled_patch16_384
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_base_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 384, 384]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_base_patch16_384
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_small_distilled_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_small_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_tiny_distilled_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_tiny_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DenseNet121
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DenseNet161
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DenseNet169
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DenseNet201
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DenseNet264
......
......@@ -15,6 +15,18 @@ Global:
save_inference_dir: "./inference"
use_dali: false
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: "./inference"
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -15,6 +15,18 @@ Global:
save_inference_dir: ./inference
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: "./inference"
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -15,6 +15,18 @@ Global:
save_inference_dir: ./inference
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -15,6 +15,18 @@ Global:
save_inference_dir: ./inference
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: "./inference"
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: "./inference"
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ESNet_x0_25
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ESNet_x0_5
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ESNet_x0_75
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ESNet_x1_0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 240, 240]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB1
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 260, 260]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB2
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 300, 300]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB3
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 380, 380]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB4
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 456, 456]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB5
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 528, 528]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB6
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 600, 600]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB7
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: GhostNet_x0_5
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: GhostNet_x1_0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: GhostNet_x1_3
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HRNet_W18_C
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HRNet_W30_C
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HRNet_W32_C
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HRNet_W40_C
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HRNet_W44_C
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HRNet_W48_C
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HRNet_W64_C
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HarDNet39_ds
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HarDNet68
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HarDNet68_ds
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HarDNet85
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: GoogLeNet
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 299, 299]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: InceptionV3
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 299, 299]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: InceptionV4
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: LeViT_128
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: LeViT_128S
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: LeViT_192
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: LeViT_256
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: LeViT_384
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MicroNet_M0
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MicroNet_M1
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MicroNet_M2
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MicroNet_M3
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MixNet_L
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MixNet_M
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MixNet_S
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNeXt_x1_0
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV1
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV1_x0_25
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV1_x0_5
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV1_x0_75
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV2
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV2_x0_25
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV2_x0_5
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV2_x0_75
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV2_x1_5
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV2_x2_0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV3_large_x0_35
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV3_large_x0_5
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV3_large_x0_75
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV3_large_x1_0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV3_large_x1_25
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV3_small_x0_35
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV3_small_x0_5
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV3_small_x0_75
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV3_small_x1_0
......
......@@ -15,6 +15,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV3_small_x1_0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileNetV3_small_x1_25
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
use_dali: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileViT_S
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
use_dali: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileViT_XS
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
use_dali: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: MobileViT_XXS
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PPLCNet_x0_25
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PPLCNet_x0_35
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PPLCNet_x0_5
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PPLCNet_x0_75
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PPLCNet_x1_0
......
......@@ -15,6 +15,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PPLCNet_x1_0
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PPLCNet_x1_5
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PPLCNet_x2_0
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PPLCNet_x2_5
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PPLCNetV2_base
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PPLCNetV2_large
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PPLCNetV2_small
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PVT_V2_B0
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PVT_V2_B1
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PVT_V2_B2
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PVT_V2_B2_Linear
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PVT_V2_B3
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PVT_V2_B4
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PVT_V2_B5
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: PeleeNet
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ReXNet_1_0
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ReXNet_1_3
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ReXNet_1_5
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ReXNet_2_0
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ReXNet_3_0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RedNet101
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RedNet152
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RedNet26
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RedNet38
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RedNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RegNetX_12GF
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RegNetX_1600MF
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RegNetX_16GF
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RegNetX_200MF
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RegNetX_3200MF
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RegNetX_32GF
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RegNetX_400MF
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RegNetX_600MF
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RegNetX_6400MF
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RegNetX_800MF
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RegNetX_8GF
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RepVGG_A0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RepVGG_A1
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RepVGG_A2
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RepVGG_B0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RepVGG_B1
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RepVGG_B1g2
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RepVGG_B1g4
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RepVGG_B2
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RepVGG_B2g4
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RepVGG_B3
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RepVGG_B3g4
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 320, 320]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: RepVGG_D2se
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: Res2Net101_vd_26w_4s
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: Res2Net200_vd_26w_4s
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: Res2Net50_14w_8s
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: Res2Net50_26w_4s
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: Res2Net50_vd_26w_4s
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeSt101
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 320, 320]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeSt200
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 416, 416]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeSt269
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeSt50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeSt50_fast_1s1x64d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt101_32x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt101_64x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt101_vd_32x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt101_vd_64x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt152_32x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt152_64x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt152_vd_32x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt152_vd_64x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt50_32x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt50_64x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt50_vd_32x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt50_vd_64x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt101_32x16d_wsl
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt101_32x32d_wsl
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt101_32x48d_wsl
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNeXt101_32x8d_wsl
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet101
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet101_vd
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet152
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet152_vd
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet18
......
......@@ -15,6 +15,18 @@ Global:
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet18
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet18_vd
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet200_vd
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet34
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet34_vd
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50_vd
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SENet154_vd
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SE_ResNeXt101_32x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SE_ResNeXt50_32x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SE_ResNeXt50_vd_32x4d
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SE_ResNet18_vd
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SE_ResNet34_vd
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SE_ResNet50_vd
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ShuffleNetV2_swish
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ShuffleNetV2_x0_25
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ShuffleNetV2_x0_33
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ShuffleNetV2_x0_5
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ShuffleNetV2_x1_0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ShuffleNetV2_x1_5
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ShuffleNetV2_x2_0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SqueezeNet1_0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SqueezeNet1_1
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformer_base_patch4_window12_384
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformer_base_patch4_window7_224
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformer_large_patch4_window12_384
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformer_large_patch4_window7_224
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformer_small_patch4_window7_224
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformer_tiny_patch4_window7_224
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformerV2_base_patch4_window16_256
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformerV2_base_patch4_window24_384
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformerV2_base_patch4_window8_256
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformerV2_large_patch4_window16_256
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformerV2_large_patch4_window24_384
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformerV2_small_patch4_window16_256
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformerV2_small_patch4_window8_256
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformerV2_tiny_patch4_window16_256
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: SwinTransformerV2_tiny_patch4_window8_256
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: TNT_base
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: TNT_small
......
......@@ -18,6 +18,18 @@ Global:
EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: TinyNet_A
......
......@@ -18,6 +18,18 @@ Global:
EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: TinyNet_B
......
......@@ -18,6 +18,18 @@ Global:
EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: TinyNet_C
......
......@@ -18,6 +18,18 @@ Global:
EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: TinyNet_D
......
......@@ -18,6 +18,18 @@ Global:
EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: TinyNet_E
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: alt_gvt_base
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: alt_gvt_large
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: alt_gvt_small
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: pcpvt_base
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: pcpvt_large
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: pcpvt_small
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: UniFormer_base
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: UniFormer_base_ls
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: UniFormer_small
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: UniFormer_small_plus
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: UniFormer_small_plus_dim64
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: VAN_B0
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: VAN_B1
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: VAN_B2
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: VAN_B3
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: VGG11
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: VGG13
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: VGG16
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: VGG19
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ViT_base_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 384, 384]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ViT_base_patch16_384
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 384, 384]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ViT_base_patch32_384
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ViT_large_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 384, 384]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ViT_large_patch16_384
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 384, 384]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ViT_large_patch32_384
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ViT_small_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 299, 299]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: Xception41
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 299, 299]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: Xception41_deeplab
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 299, 299]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: Xception65
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 299, 299]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: Xception65_deeplab
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 299, 299]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: Xception71
......