Commit b3f7e3b9 authored by gaotingquan, committed by Tingquan Gao

unify comments

Parent 8405882f
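The unified comment documents Paddle's two AMP levels: O1 (mixed fp16) runs a whitelist of ops in fp16 while keeping fp32 master weights, whereas O2 (pure fp16) also casts the model's parameters to fp16. As a rough illustration only (this snippet is not part of the commit, nor of PaddleClas's trainer), here is a minimal sketch of how AMP: keys like those below are typically consumed in a Paddle training step; the Linear model, optimizer, and random data are hypothetical stand-ins:

import paddle

# Hypothetical stand-ins for a real PaddleClas model and data loader.
model = paddle.nn.Linear(10, 2)
optimizer = paddle.optimizer.Momentum(learning_rate=0.1,
                                      parameters=model.parameters())

# init_loss_scaling / use_dynamic_loss_scaling mirror the AMP: keys
# (scale_loss, use_dynamic_loss_scaling) in the configs below.
scaler = paddle.amp.GradScaler(init_loss_scaling=128.0,
                               use_dynamic_loss_scaling=True)

# level="O2" (pure fp16) would also cast the parameters to fp16;
# "O1" (mixed fp16) keeps fp32 weights and runs whitelisted ops in fp16.
model, optimizer = paddle.amp.decorate(models=model, optimizers=optimizer,
                                       level="O1")

x = paddle.randn([4, 10])
y = paddle.randint(0, 2, [4])
with paddle.amp.auto_cast(level="O1"):   # mixed-precision forward pass
    loss = paddle.nn.functional.cross_entropy(model(x), y)
scaled = scaler.scale(loss)              # multiply the loss by the scale
scaled.backward()
scaler.minimize(optimizer, scaled)       # unscale grads; skip step on inf/nan
optimizer.clear_grad()

Because use_dynamic_loss_scaling is True, the scaler shrinks the scale on overflow and grows it back during stable stretches, so the differing initial values in the configs below (128.0 vs. 65536) mainly affect the first iterations.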
@@ -17,7 +17,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -18,7 +18,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -17,7 +17,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -18,7 +18,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 EMA:

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O2: pure fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O2
 # model architecture

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O2: pure fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O2
 # model architecture

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -22,7 +22,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -22,7 +22,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O2: pure fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O2
 # model architecture

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O2: pure fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O2
 # model architecture

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O2: pure fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O2
 # model architecture