Commit 0f86c555 authored by gaotingquan, committed by Tingquan Gao

add AMP config args; use_amp defaults to False

Parent 2d8346cd
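
The same AMP block, disabled by default, is added after the Global section of every config below, so default training behavior is unchanged. To actually train with mixed precision, only the switch needs flipping; a minimal sketch of the enabled variant, keeping every value other than use_amp at the defaults introduced in this commit:

# mixed precision
AMP:
  use_amp: True                   # enable automatic mixed precision
  use_fp16_test: False            # keep evaluation in fp32
  scale_loss: 128.0               # initial loss-scaling factor
  use_dynamic_loss_scaling: True  # let the scaler adapt during training
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

Per the inline comment, raising level to O2 switches from mixed fp16 to pure fp16; scale_loss and use_dynamic_loss_scaling control loss scaling, which guards fp16 gradients against underflow.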
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: AlexNet
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CSPDarkNet53
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CSWinTransformer_base_224
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CSWinTransformer_base_384
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CSWinTransformer_large_224
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CSWinTransformer_large_384
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CSWinTransformer_small_224
......
......@@ -17,6 +17,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CSWinTransformer_tiny_224
......
......@@ -22,6 +22,18 @@ EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ConvNeXt_base_224
......
......@@ -22,6 +22,18 @@ EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ConvNeXt_base_384
......
......@@ -22,6 +22,18 @@ EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ConvNeXt_large_224
......
......@@ -22,6 +22,18 @@ EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ConvNeXt_large_384
......
......@@ -22,6 +22,18 @@ EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ConvNeXt_small
......
......@@ -22,6 +22,18 @@ EMA:
decay: 0.9999
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ConvNeXt_tiny
......
......@@ -17,6 +17,18 @@ Global:
to_static: False
update_freq: 2 # for 8 cards
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CvT_13_224
......
......@@ -17,6 +17,18 @@ Global:
to_static: False
update_freq: 2 # for 8 cards
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CvT_13_384
......
......@@ -17,6 +17,18 @@ Global:
to_static: False
update_freq: 2 # for 8 cards
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CvT_21_224
......
......@@ -17,6 +17,18 @@ Global:
to_static: False
update_freq: 2 # for 8 cards
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CvT_21_384
......
......@@ -17,6 +17,18 @@ Global:
to_static: False
update_freq: 2 # for 8 cards
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: CvT_W24_384
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA102
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA102x
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA102x2
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA169
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA34
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA46_c
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA46x_c
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA60
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA60x
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DLA60x_c
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DPN107
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DPN131
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DPN68
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DPN92
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DPN98
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DSNet_base
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DSNet_small
......
......@@ -16,6 +16,18 @@ Global:
# training model under @to_static
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DSNet_tiny
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 256, 256]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DarkNet53
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ResNet50
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_base_distilled_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 384, 384]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_base_distilled_patch16_384
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_base_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 384, 384]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_base_patch16_384
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_small_distilled_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_small_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_tiny_distilled_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DeiT_tiny_patch16_224
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DenseNet121
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DenseNet161
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DenseNet169
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DenseNet201
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: DenseNet264
......
......@@ -15,6 +15,18 @@ Global:
save_inference_dir: "./inference"
use_dali: false
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: "./inference"
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -15,6 +15,18 @@ Global:
save_inference_dir: ./inference
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: "./inference"
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -15,6 +15,18 @@ Global:
save_inference_dir: ./inference
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -15,6 +15,18 @@ Global:
save_inference_dir: ./inference
to_static: False
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: "./inference"
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: "./inference"
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: "DistillationModel"
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ESNet_x0_25
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ESNet_x0_5
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ESNet_x0_75
......
......@@ -13,6 +13,18 @@ Global:
# used for static mode and model export
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: ESNet_x1_0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 240, 240]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB1
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 260, 260]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB2
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 300, 300]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB3
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 380, 380]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB4
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 456, 456]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB5
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 528, 528]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB6
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 600, 600]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: EfficientNetB7
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: GhostNet_x0_5
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: GhostNet_x1_0
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: GhostNet_x1_3
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HRNet_W18_C
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HRNet_W30_C
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HRNet_W32_C
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HRNet_W40_C
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HRNet_W44_C
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HRNet_W48_C
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HRNet_W64_C
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HarDNet39_ds
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HarDNet68
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HarDNet68_ds
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: HarDNet85
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: GoogLeNet
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 299, 299]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: InceptionV3
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 299, 299]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: InceptionV4
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: LeViT_128
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: LeViT_128S
......
......@@ -14,6 +14,18 @@ Global:
image_shape: [3, 224, 224]
save_inference_dir: ./inference
# mixed precision
AMP:
use_amp: False
use_fp16_test: False
scale_loss: 128.0
use_dynamic_loss_scaling: True
use_promote: False
# O1: mixed fp16, O2: pure fp16
level: O1
# model architecture
Arch:
name: LeViT_192
......