fix fused_attn with fp32 (#3364)

develop
zhangyubo0722 2025-04-22 15:04:26 +08:00 committed by GitHub
parent d15bbf82cf
commit 496011d09b
2 changed files with 8 additions and 2 deletions

@@ -66,6 +66,10 @@ class Engine(object):
             self.is_rec = True
         else:
             self.is_rec = False
+        if self.config["Arch"].get("use_fused_attn", False):
+            if not self.config.get("AMP", {}).get("use_amp", False):
+                self.config["Arch"]["use_fused_attn"] = False
+                self.config["Arch"]["use_fused_linear"] = False
 
         # set seed
         seed = self.config["Global"].get("seed", False)
@@ -106,8 +110,8 @@
         # set device
         assert self.config["Global"]["device"] in [
-            "cpu", "gpu", "xpu", "npu", "mlu", "dcu", "ascend", "intel_gpu", "mps",
-            "gcu"
+            "cpu", "gpu", "xpu", "npu", "mlu", "dcu", "ascend", "intel_gpu",
+            "mps", "gcu"
         ]
         self.device = paddle.set_device(self.config["Global"]["device"])
         logger.info('train with paddle {} and device {}'.format(
             paddle.__version__, self.device))
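The substance of the commit is the first hunk: judging by the title ("fix fused_attn with fp32"), the fused attention and fused linear kernels misbehave without AMP, so the engine now forces both flags off whenever `use_amp` is not enabled before the model is built. Below is a minimal sketch of that guard, lifted out of `Engine.__init__` for clarity; the helper name `resolve_fused_ops`, the model name, and the example config values are hypothetical and only illustrate the nested config layout used by PaddleClas.

def resolve_fused_ops(config: dict) -> dict:
    """Force fused attention/linear off when AMP (mixed precision) is disabled."""
    if config["Arch"].get("use_fused_attn", False):
        if not config.get("AMP", {}).get("use_amp", False):
            config["Arch"]["use_fused_attn"] = False
            config["Arch"]["use_fused_linear"] = False
    return config

# Example: an fp32 run (no AMP section in the config) silently drops the fused flags.
cfg = {"Arch": {"name": "ViT_base_patch16_224", "use_fused_attn": True}}
print(resolve_fused_ops(cfg)["Arch"])
# {'name': 'ViT_base_patch16_224', 'use_fused_attn': False, 'use_fused_linear': False}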

@@ -0,0 +1,2 @@
+0 wired_table
+1 wireless_table