Commit 09817fe8 authored by gaotingquan, committed by Tingquan Gao

complete amp args

Parent b3f7e3b9
@@ -15,8 +15,11 @@ Global:
   save_inference_dir: ./inference
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
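The change adds the same three keys to every AMP block: use_amp (the master switch), use_fp16_test (presumably whether evaluation also runs under fp16; this one is consumed by the trainer rather than by paddle.amp itself), and use_promote (whether fp16 kernels fall back to fp32 when inputs mix precisions). Below is a minimal sketch of how such a block typically maps onto the paddle.amp API, assuming a Paddle version recent enough to expose use_promote on auto_cast; the toy model, optimizer, and variable names are illustrative only, and the exact wiring inside PaddleClas's engine may differ.

import paddle

# Values mirroring the first AMP block above; names on the right are the YAML keys.
use_amp = True                   # AMP.use_amp
scale_loss = 128.0               # AMP.scale_loss: initial loss-scaling factor
use_dynamic_loss_scaling = True  # AMP.use_dynamic_loss_scaling
use_promote = False              # AMP.use_promote
level = "O1"                     # AMP.level: O1 = mixed fp16, O2 = pure fp16

model = paddle.nn.Linear(16, 2)  # stand-in for the real network
optimizer = paddle.optimizer.SGD(learning_rate=0.1, parameters=model.parameters())
scaler = paddle.amp.GradScaler(
    enable=use_amp,
    init_loss_scaling=scale_loss,
    use_dynamic_loss_scaling=use_dynamic_loss_scaling,
)

x = paddle.randn([4, 16])
with paddle.amp.auto_cast(enable=use_amp, level=level, use_promote=use_promote):
    loss = model(x).mean()       # forward pass runs in fp16 where safe
scaler.scale(loss).backward()    # scale loss to keep fp16 gradients from underflowing
scaler.step(optimizer)           # unscales gradients, then applies the update
scaler.update()                  # adjusts the scale when dynamic scaling is on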
@@ -16,8 +16,11 @@ Global:
   to_static: True
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -15,8 +15,11 @@ Global:
   save_inference_dir: ./inference
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -16,8 +16,11 @@ Global:
   save_inference_dir: ./inference
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 65536
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
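Note that this config and several below seed the scaler at 65536 (2**16) rather than 128.0. Since use_dynamic_loss_scaling is True in both cases, the scaler adjusts from either starting point, so the choice mainly affects behavior during the first steps of training.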
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O2
...
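A few of the configs in this commit (this one and the ones near the end) set level: O2, i.e. pure fp16 training. Under O2 the parameters themselves are cast to fp16, so Paddle additionally expects the model and optimizer to be wrapped with paddle.amp.decorate, which retains fp32 master weights for stable updates. A hedged sketch of that extra step, with the same illustrative toy model as above:

import paddle

model = paddle.nn.Linear(16, 2)
optimizer = paddle.optimizer.SGD(learning_rate=0.1, parameters=model.parameters())

# O2 only: cast parameters to fp16 while retaining fp32 master weights.
model, optimizer = paddle.amp.decorate(models=model, optimizers=optimizer, level="O2")

x = paddle.randn([4, 16])
with paddle.amp.auto_cast(enable=True, level="O2", use_promote=False):
    loss = model(x).mean()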
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 65536
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 65536
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 65536
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 65536
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 65536
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 65536
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 65536
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 65536
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 65536
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 65536
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 65536
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 65536
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 65536
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -19,8 +19,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -19,8 +19,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -19,8 +19,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -19,8 +19,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -19,8 +19,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -19,8 +19,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -19,8 +19,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -19,8 +19,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -19,8 +19,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O2
...
@@ -19,8 +19,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -20,8 +20,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O1
...
@@ -20,8 +20,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O2
...
@@ -19,8 +19,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O2
...
@@ -17,8 +17,11 @@ Global:
 # mixed precision training
 AMP:
+  use_amp: True
+  use_fp16_test: False
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
+  use_promote: False
   # O1: mixed fp16, O2: pure fp16
   level: O2
...