Fix F.sdpa attn drop prob

pull/2092/head
Ross Wightman 2024-02-10 20:14:47 -08:00
parent 0737cf231d
commit 935950cc11
1 changed file with 1 addition and 1 deletion


@@ -263,7 +263,7 @@ class EfficientAttention(nn.Module):
         if self.fused_attn:
             x = F.scaled_dot_product_attention(
                 q, k, v,
-                dropout_p=self.attn_drop if self.training else 0.,
+                dropout_p=self.attn_drop.p if self.training else 0.,
             )
         else:
             q = q * self.scale
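
For context: `attn_drop` here is an `nn.Dropout` module, while `F.scaled_dot_product_attention` expects a plain float for its `dropout_p` argument, so the pre-fix code passed a module where a probability was required; the stored probability lives on the module's `.p` attribute. Below is a minimal sketch of the pattern, not the actual timm `EfficientAttention` class — the class name, layer layout, and shapes are simplified assumptions, and only the `dropout_p=self.attn_drop.p` line mirrors the fix.

import torch
import torch.nn as nn
import torch.nn.functional as F


class FusedAttention(nn.Module):
    # Hypothetical, simplified stand-in for the patched module.
    def __init__(self, dim: int, num_heads: int = 8, attn_drop: float = 0.):
        super().__init__()
        self.num_heads = num_heads
        self.head_dim = dim // num_heads
        self.qkv = nn.Linear(dim, dim * 3)
        self.attn_drop = nn.Dropout(attn_drop)  # a module; the float prob lives in .p
        self.proj = nn.Linear(dim, dim)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        B, N, C = x.shape
        qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, self.head_dim)
        q, k, v = qkv.permute(2, 0, 3, 1, 4).unbind(0)  # each (B, heads, N, head_dim)
        x = F.scaled_dot_product_attention(
            q, k, v,
            # The fix: pass the float stored on the Dropout module (.p),
            # not the module itself, and only apply dropout during training.
            dropout_p=self.attn_drop.p if self.training else 0.,
        )
        x = x.transpose(1, 2).reshape(B, N, C)
        return self.proj(x)

A quick smoke test of the sketch: `FusedAttention(dim=64).train()(torch.randn(2, 16, 64))` returns a `(2, 16, 64)` tensor, whereas passing the `Dropout` module itself as `dropout_p` fails with a type error.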