complete amp args

pull/2526/head
gaotingquan 2023-05-29 09:14:57 +00:00 committed by Tingquan Gao
parent b3f7e3b974
commit 09817fe859
34 changed files with 102 additions and 0 deletions

@@ -15,8 +15,11 @@ Global:
  save_inference_dir: ./inference
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1
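For context, the keys in this block line up with the arguments of Paddle's AMP utilities. The sketch below is only an illustration of how a trainer might forward them, not PaddleClas's own trainer code; the toy model, optimizer, and the amp_cfg dict are made-up placeholders, and use_fp16_test / use_promote are assumed to be consumed elsewhere by the training and evaluation loop.

import paddle

# Illustrative values mirroring the AMP block above.
amp_cfg = {
    "use_amp": True,
    "scale_loss": 128.0,
    "use_dynamic_loss_scaling": True,
    "level": "O1",  # O1: mixed fp16, O2: pure fp16
}

model = paddle.nn.Linear(8, 2)                                    # placeholder model
optimizer = paddle.optimizer.Momentum(parameters=model.parameters())

# scale_loss -> init_loss_scaling; dynamic loss scaling adjusts it during training.
scaler = paddle.amp.GradScaler(
    init_loss_scaling=amp_cfg["scale_loss"],
    use_dynamic_loss_scaling=amp_cfg["use_dynamic_loss_scaling"],
)

# O2 ("pure fp16") additionally casts the parameters via paddle.amp.decorate.
if amp_cfg["level"] == "O2":
    model, optimizer = paddle.amp.decorate(models=model, optimizers=optimizer, level="O2")

x = paddle.randn([4, 8])
with paddle.amp.auto_cast(enable=amp_cfg["use_amp"], level=amp_cfg["level"]):
    loss = model(x).mean()

scaled = scaler.scale(loss)   # scale the loss before backward
scaled.backward()
scaler.step(optimizer)        # unscale gradients, then run the optimizer step
scaler.update()               # update the loss scale for the next iteration
optimizer.clear_grad()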

@@ -16,8 +16,11 @@ Global:
  to_static: True
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -15,8 +15,11 @@ Global:
  save_inference_dir: ./inference
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -16,8 +16,11 @@ Global:
  save_inference_dir: ./inference
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 65536
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O2
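Several of the configs in this commit set level to O2 rather than O1. As a hedged sketch (again, not the project's own trainer code), O2 corresponds to Paddle's pure-fp16 mode, where the parameters themselves are cast to float16 through paddle.amp.decorate, typically with fp32 master weights kept for the optimizer update; the model and optimizer below are placeholders.

import paddle

model = paddle.nn.Linear(8, 2)                                    # placeholder model
optimizer = paddle.optimizer.Momentum(parameters=model.parameters())

# O2 ("pure fp16"): cast parameters up front; master_weight=True keeps fp32
# copies so the optimizer step stays numerically stable.
model, optimizer = paddle.amp.decorate(
    models=model, optimizers=optimizer, level="O2", master_weight=True
)

with paddle.amp.auto_cast(level="O2"):
    loss = model(paddle.randn([4, 8])).mean()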

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 65536
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 65536
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 65536
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 65536
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 65536
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 65536
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 65536
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 65536
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 65536
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 65536
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 65536
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 65536
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 65536
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -19,8 +19,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -19,8 +19,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -19,8 +19,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -19,8 +19,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -19,8 +19,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -19,8 +19,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -19,8 +19,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -19,8 +19,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -19,8 +19,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O2

@@ -19,8 +19,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -20,8 +20,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O1

@@ -20,8 +20,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O2

@@ -19,8 +19,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O2

@@ -17,8 +17,11 @@ Global:
# mixed precision training
AMP:
  use_amp: True
  use_fp16_test: False
  scale_loss: 128.0
  use_dynamic_loss_scaling: True
  use_promote: False
  # O1: mixed fp16, O2: pure fp16
  level: O2