fastvit: don't dropout in eval

Yassine 2023-10-03 11:57:24 -07:00 committed by Ross Wightman
parent 054c763fca
commit b500cae4c5


@@ -514,7 +514,7 @@ class Attention(nn.Module):
         if self.fused_attn:
             x = torch.nn.functional.scaled_dot_product_attention(
                 q, k, v,
-                dropout_p=self.attn_drop.p,
+                dropout_p=self.attn_drop.p if self.training else 0.0,
             )
         else:
             q = q * self.scale
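
Below is a minimal standalone sketch of the pattern this commit applies; the TinyAttention module, its dimensions, and the check at the bottom are illustrative only, not the actual FastViT code. The point of the change: nn.Dropout disables itself outside training, but torch.nn.functional.scaled_dot_product_attention applies dropout whenever dropout_p > 0 regardless of module mode, so the probability has to be gated on self.training to keep eval deterministic.

# Illustrative sketch (assumed module layout, not the timm FastViT Attention class).
import torch
import torch.nn as nn
import torch.nn.functional as F

class TinyAttention(nn.Module):
    def __init__(self, dim: int, num_heads: int = 4, attn_drop: float = 0.1):
        super().__init__()
        self.num_heads = num_heads
        self.head_dim = dim // num_heads
        self.qkv = nn.Linear(dim, dim * 3)
        self.attn_drop = nn.Dropout(attn_drop)
        self.proj = nn.Linear(dim, dim)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        B, N, C = x.shape
        # Split into per-head q, k, v of shape (B, num_heads, N, head_dim).
        qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)
        q, k, v = qkv.unbind(0)
        x = F.scaled_dot_product_attention(
            q, k, v,
            # The fix: SDPA ignores train/eval mode, so only pass the
            # dropout probability while the module is training.
            dropout_p=self.attn_drop.p if self.training else 0.0,
        )
        x = x.transpose(1, 2).reshape(B, N, C)
        return self.proj(x)

# In eval mode two forward passes now give identical outputs, because no
# attention dropout is applied. Without the gate, dropout_p=0.1 would still
# randomize the attention weights at inference time.
model = TinyAttention(dim=64).eval()
inp = torch.randn(1, 16, 64)
with torch.no_grad():
    assert torch.allclose(model(inp), model(inp))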