unify comments

pull/2526/head
gaotingquan 2023-05-29 09:06:10 +00:00 committed by Tingquan Gao
parent 8405882f11
commit b3f7e3b974
34 changed files with 34 additions and 34 deletions
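
Every hunk below is the same one-line edit: the comment above the AMP `level` key now documents both supported values instead of only the one in use, so O1 (mixed fp16) and O2 (pure fp16) configs carry identical comments. For reference, a minimal sketch of how an AMP block like these is typically consumed in PaddlePaddle 2.x; the model, optimizer, and data here are placeholders, while the paddle.amp calls (GradScaler, decorate, auto_cast) are the standard API:

import paddle

# Placeholder network and optimizer; any Paddle model works the same way.
model = paddle.vision.models.resnet50()
optimizer = paddle.optimizer.Momentum(parameters=model.parameters())

# From the config: level O1 keeps weights in fp32 and runs selected ops in
# fp16 inside auto_cast; level O2 also casts the weights themselves to fp16.
amp_level = "O1"

# scale_loss -> init_loss_scaling; use_dynamic_loss_scaling maps directly.
scaler = paddle.amp.GradScaler(init_loss_scaling=128.0,
                               use_dynamic_loss_scaling=True)
model, optimizer = paddle.amp.decorate(models=model, optimizers=optimizer,
                                       level=amp_level)

# One training step on random data (assumes a CUDA device).
data = paddle.rand([8, 3, 224, 224])
label = paddle.randint(0, 1000, [8, 1])
with paddle.amp.auto_cast(level=amp_level):
    loss = paddle.nn.functional.cross_entropy(model(data), label)
scaled = scaler.scale(loss)   # multiply the loss by the current scale factor
scaled.backward()
scaler.step(optimizer)        # unscale the gradients, then apply the update
scaler.update()               # adjust the scale when dynamic scaling is on
optimizer.clear_grad()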

@@ -17,7 +17,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -18,7 +18,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -17,7 +17,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -18,7 +18,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 EMA:

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O2: pure fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O2
 # model architecture

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 65536
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model ema

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O2: pure fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O2
 # model architecture

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -22,7 +22,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O1: mixed fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O1
 # model architecture

@@ -22,7 +22,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O2: pure fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O2
 # model architecture

@@ -21,7 +21,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O2: pure fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O2
 # model architecture

@@ -19,7 +19,7 @@ Global:
 AMP:
   scale_loss: 128.0
   use_dynamic_loss_scaling: True
-  # O2: pure fp16
+  # O1: mixed fp16, O2: pure fp16
   level: O2
 # model architecture
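
All of these configs enable use_dynamic_loss_scaling and differ only in the starting scale (128.0 in most hunks, 65536 in the EMA configs above). As a rough illustration, the update rule behind dynamic loss scaling looks like the sketch below; the constants are Paddle's GradScaler defaults, not values read from these files, and this is simplified: the real scaler only backs off after decr_every_n_nan_or_inf (default 2) consecutive overflow steps rather than immediately:

def update_loss_scale(scale, found_inf, good_steps,
                      incr_every_n_steps=1000, incr_ratio=2.0,
                      decr_ratio=0.5):
    """Return the new (scale, good_steps) after one optimizer step."""
    if found_inf:
        # Overflow in the gradients: shrink the scale and reset the streak.
        return scale * decr_ratio, 0
    good_steps += 1
    if good_steps >= incr_every_n_steps:
        # A long run of finite gradients: try a larger scale.
        return scale * incr_ratio, 0
    return scale, good_steps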