From 6ebb3f77ad5ac768cb06a11bd0ffc85c2be5c43c Mon Sep 17 00:00:00 2001
From: Hubert <42952108+yingfhu@users.noreply.github.com>
Date: Mon, 26 Sep 2022 14:12:51 +0800
Subject: [PATCH] [Fix] Fix attenstion clamp max params (#1034)

---
 mmcls/models/utils/attention.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/mmcls/models/utils/attention.py b/mmcls/models/utils/attention.py
index 4e795ed0..1aae72ae 100644
--- a/mmcls/models/utils/attention.py
+++ b/mmcls/models/utils/attention.py
@@ -261,10 +261,7 @@ class WindowMSAV2(BaseModule):
         attn = (
             F.normalize(q, dim=-1) @ F.normalize(k, dim=-1).transpose(-2, -1))
         logit_scale = torch.clamp(
-            self.logit_scale,
-            max=torch.log(
-                torch.tensor(1. / 0.01,
-                             device=self.logit_scale.device))).exp()
+            self.logit_scale, max=np.log(1. / 0.01)).exp()
         attn = attn * logit_scale
 
         relative_position_bias_table = self.cpb_mlp(
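
For context, below is a minimal, self-contained sketch (not the mmcls code itself) of the scaled cosine-attention step this hunk touches. The tensor shapes, the num_heads/head_dim values, and the deliberately large initial logit scale are illustrative assumptions; the point is that np.log(1. / 0.01) is a plain Python float, so torch.clamp receives a scalar max on every supported PyTorch version, whereas the removed form passed a 0-dim tensor, which some older PyTorch releases of torch.clamp do not accept.

# Minimal sketch of the WindowMSAV2-style scaled cosine attention step; the
# shapes and the initial logit-scale value are illustrative assumptions.
import numpy as np
import torch
import torch.nn.functional as F

num_heads, seq_len, head_dim = 4, 49, 32
q = torch.randn(1, num_heads, seq_len, head_dim)
k = torch.randn(1, num_heads, seq_len, head_dim)

# Learnable per-head logit scale, set above the cap on purpose so the clamp
# actually takes effect in this example.
logit_scale = torch.nn.Parameter(
    torch.full((num_heads, 1, 1), float(np.log(1000.))))

# Cosine attention: L2-normalize q and k, then rescale by the clamped,
# learnable logit scale.
attn = F.normalize(q, dim=-1) @ F.normalize(k, dim=-1).transpose(-2, -1)

# New form from this patch: max is a plain Python float.
scale_new = torch.clamp(logit_scale, max=np.log(1. / 0.01)).exp()

# Removed form: max is a 0-dim tensor, which some older torch.clamp releases
# reject; on recent PyTorch both forms clamp to the same value.
scale_old = torch.clamp(
    logit_scale, max=torch.log(torch.tensor(1. / 0.01))).exp()

assert torch.allclose(scale_new, scale_old)
attn = attn * scale_new  # (1, num_heads, seq_len, seq_len)

The cap itself is unchanged by the patch: the logit scale is still limited to log(1 / 0.01) = log(100); only the type of the max argument changes from a tensor to a scalar.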