From 7eb7d1384527fb8841d347b5c67d0a9facc34730 Mon Sep 17 00:00:00 2001
From: Thorsten Hempel <42150441+thohemp@users.noreply.github.com>
Date: Wed, 13 Sep 2023 10:38:13 +0200
Subject: [PATCH] Fix in_features for linear layer in reset_classifier.

---
 timm/models/ghostnet.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/timm/models/ghostnet.py b/timm/models/ghostnet.py
index b7c0f5dd..d34b5485 100644
--- a/timm/models/ghostnet.py
+++ b/timm/models/ghostnet.py
@@ -276,7 +276,7 @@ class GhostNet(nn.Module):
         # cannot meaningfully change pooling of efficient head after creation
         self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)
         self.flatten = nn.Flatten(1) if global_pool else nn.Identity()  # don't flatten if pooling disabled
-        self.classifier = Linear(self.pool_dim, num_classes) if num_classes > 0 else nn.Identity()
+        self.classifier = Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()
 
     def forward_features(self, x):
         x = self.conv_stem(x)
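
Note (not part of the patch): a minimal sketch of why the change matters, assuming timm is installed and using the 'ghostnet_100' model name purely for illustration. GhostNet's head runs global_pool -> conv_head -> flatten -> classifier, so the tensor reaching the classifier has self.num_features channels (the conv_head output width), not self.pool_dim; reset_classifier therefore has to size the replacement Linear layer from num_features.

    import timm
    import torch

    # Build a GhostNet model; pretrained weights are not needed to show the shape issue.
    model = timm.create_model('ghostnet_100', pretrained=False)

    # Replace the classification head, e.g. to fine-tune on a 10-class task.
    model.reset_classifier(num_classes=10)

    # With the patched reset_classifier the new head matches the feature width
    # coming out of conv_head, so a forward pass succeeds. With the old
    # pool_dim-based head, the final Linear layer would hit a shape mismatch.
    x = torch.randn(1, 3, 224, 224)
    out = model(x)
    print(out.shape)                                            # torch.Size([1, 10])
    print(model.classifier.in_features == model.num_features)   # True after the fix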