Add missing docs in SwinTransformerStage

pull/2053/head
akiyuki ishikawa 2023-12-05 11:07:03 +09:00 committed by Ross Wightman
parent df7ae11eb2
commit 4f2e1bf4cb
2 changed files with 3 additions and 1 deletion

View File

@@ -384,6 +384,7 @@ class SwinTransformerStage(nn.Module):
Args:
dim: Number of input channels.
input_resolution: Input resolution.
out_dim: Number of output channels.
depth: Number of blocks.
downsample: Downsample layer at the end of the layer.
num_heads: Number of attention heads.

View File

@@ -378,6 +378,7 @@ class SwinTransformerV2Stage(nn.Module):
"""
Args:
dim: Number of input channels.
out_dim: Number of output channels.
input_resolution: Input resolution.
depth: Number of blocks.
num_heads: Number of attention heads.
@@ -640,7 +641,7 @@ def checkpoint_filter_fn(state_dict, model):
k = re.sub(r'layers.(\d+).downsample', lambda x: f'layers.{int(x.group(1)) + 1}.downsample', k)
k = k.replace('head.', 'head.fc.')
out_dict[k] = v
return out_dict