Mirror of https://github.com/open-mmlab/mmselfsup.git, synced 2025-06-03 14:59:38 +08:00
Merge pull request #46 from guochengqian/patch-1
Create r50_withoutsobel.py for deepclustering experiment
This commit is contained in: commit d191601da6
configs/selfsup/deepcluster/r50_withoutsobel.py (88 lines added, new file)
@@ -0,0 +1,88 @@
_base_ = '../../base.py'
# model settings
num_classes = 10000
model = dict(
    type='DeepCluster',
    pretrained=None,
    with_sobel=False,
    backbone=dict(
        type='ResNet',
        depth=50,
        in_channels=3,
        out_indices=[4],  # 0: conv-1, x: stage-x
        norm_cfg=dict(type='BN')),
    neck=dict(type='AvgPoolNeck'),
    head=dict(
        type='ClsHead',
        with_avg_pool=False,  # already has avgpool in the neck
        in_channels=2048,
        num_classes=num_classes))
# dataset settings
data_source_cfg = dict(
    type='ImageNet',
    memcached=True,
    mclient_path='/mnt/lustre/share/memcached_client')
data_train_list = 'data/imagenet/meta/train.txt'
data_train_root = 'data/imagenet/jpeg/train'
dataset_type = 'DeepClusterDataset'
img_norm_cfg = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
train_pipeline = [
    dict(type='RandomResizedCrop', size=224),
    dict(type='RandomHorizontalFlip'),
    dict(type='RandomRotation', degrees=2),
    dict(
        type='ColorJitter',
        brightness=0.4,
        contrast=0.4,
        saturation=1.0,
        hue=0.5),
    dict(type='RandomGrayscale', p=0.2),
    dict(type='ToTensor'),
    dict(type='Normalize', **img_norm_cfg),
]
extract_pipeline = [
    dict(type='Resize', size=256),
    dict(type='CenterCrop', size=224),
    dict(type='ToTensor'),
    dict(type='Normalize', **img_norm_cfg),
]
data = dict(
    imgs_per_gpu=64,  # 32
    workers_per_gpu=4,
    train=dict(
        type=dataset_type,
        data_source=dict(
            list_file=data_train_list, root=data_train_root,
            **data_source_cfg),
        pipeline=train_pipeline))
# additional hooks
custom_hooks = [
    dict(
        type='DeepClusterHook',
        extractor=dict(
            imgs_per_gpu=128,
            workers_per_gpu=8,
            dataset=dict(
                type=dataset_type,
                data_source=dict(
                    list_file=data_train_list,
                    root=data_train_root,
                    **data_source_cfg),
                pipeline=extract_pipeline)),
        clustering=dict(type='Kmeans', k=num_classes, pca_dim=256),
        unif_sampling=True,
        reweight=False,
        reweight_pow=0.5,
        initial=True,  # call initially
        interval=1)
]
# optimizer
optimizer = dict(
    type='SGD', lr=0.3, momentum=0.9, weight_decay=0.00001,
    nesterov=False,
    paramwise_options={'\Ahead.': dict(momentum=0.)})
# learning policy
lr_config = dict(policy='step', step=[400])
checkpoint_config = dict(interval=10)
# runtime settings
total_epochs = 200
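
For quick sanity-checking, a config like this can be loaded and inspected with mmcv's Config utility before launching a run. The snippet below is a minimal sketch assuming mmcv is installed and the file sits at the path added in this PR; the repo's own entry points (tools/train.py and its dist_train.sh wrapper) may handle extra setup such as work_dir and launcher flags.

# Sketch: load the new config and print a few resolved fields (assumes mmcv is available).
from mmcv import Config

cfg = Config.fromfile('configs/selfsup/deepcluster/r50_withoutsobel.py')
print(cfg.model.backbone.type, cfg.model.backbone.depth)  # ResNet, 50
print(cfg.model.with_sobel)                               # False: raw 3-channel RGB input
print(cfg.custom_hooks[0].clustering)                     # Kmeans with k=10000, pca_dim=256

# Hypothetical multi-GPU launch, following the usual tools/ layout of this repo:
#   bash tools/dist_train.sh configs/selfsup/deepcluster/r50_withoutsobel.py 8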
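
The names in train_pipeline mirror standard torchvision transforms, so the augmentation can be read as the roughly equivalent torchvision composition below. This is illustration only: the repo builds its pipeline through its own registry, and the registered transforms may differ in detail from the torchvision ones used here.

# Illustration only: an approximate torchvision equivalent of train_pipeline.
from torchvision import transforms

train_transform = transforms.Compose([
    transforms.RandomResizedCrop(224),
    transforms.RandomHorizontalFlip(),
    transforms.RandomRotation(degrees=2),
    transforms.ColorJitter(brightness=0.4, contrast=0.4, saturation=1.0, hue=0.5),
    transforms.RandomGrayscale(p=0.2),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
])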
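
The DeepClusterHook is where the pseudo-labels come from: every `interval` epochs (and once at the start, since initial=True) it extracts features using extract_pipeline, reduces them to pca_dim dimensions, clusters them into num_classes groups, and re-labels the dataset with the cluster assignments; unif_sampling=True then draws the pseudo-classes roughly uniformly so large clusters do not dominate. The sketch below only illustrates that clustering step, with scikit-learn standing in for the repo's own Kmeans implementation; the function and variable names are made up for the example.

# Illustrative sketch (NOT the repo's implementation) of the clustering step that
# DeepClusterHook performs: features -> PCA(pca_dim) -> k-means into k pseudo-labels.
import numpy as np
from sklearn.decomposition import PCA
from sklearn.cluster import KMeans

def pseudo_label(features: np.ndarray, k: int = 10000, pca_dim: int = 256) -> np.ndarray:
    """Return one pseudo-label per sample, mirroring clustering=dict(type='Kmeans', ...)."""
    reduced = PCA(n_components=pca_dim).fit_transform(features)
    # L2-normalize before k-means, as is common in DeepCluster-style pipelines (assumption).
    reduced /= np.linalg.norm(reduced, axis=1, keepdims=True) + 1e-10
    return KMeans(n_clusters=k, n_init=1, max_iter=20).fit_predict(reduced)

# Toy usage: 1000 fake 2048-d backbone features clustered into 50 groups.
labels = pseudo_label(np.random.randn(1000, 2048).astype(np.float32), k=50, pca_dim=32)
print(labels.shape, labels.min(), labels.max())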