# Copyright (c) OpenMMLab. All rights reserved.
from typing import Optional, Sequence

from mmengine.hooks import Hook

from mmpretrain.registry import HOOKS


@HOOKS.register_module()
class SimSiamHook(Hook):
    """Hook for SimSiam.

    This hook fixes the learning rate of the predictor in SimSiam.

    Args:
        fix_pred_lr (bool): Whether to fix the learning rate of the predictor.
        lr (float): The value of the fixed learning rate.
        adjust_by_epoch (bool, optional): Whether to set the learning rate by
            epoch or by iteration. Defaults to True.
    """

    def __init__(self,
                 fix_pred_lr: bool,
                 lr: float,
                 adjust_by_epoch: Optional[bool] = True) -> None:
        self.fix_pred_lr = fix_pred_lr
        self.lr = lr
        self.adjust_by_epoch = adjust_by_epoch

    def before_train_iter(self,
                          runner,
                          batch_idx: int,
                          data_batch: Optional[Sequence[dict]] = None) -> None:
        """Fix the learning rate of the predictor before each iteration."""
        if self.adjust_by_epoch:
            # The learning rate is handled in ``before_train_epoch`` instead.
            return

        if self.fix_pred_lr:
            for param_group in runner.optim_wrapper.optimizer.param_groups:
                # Only parameter groups marked with ``fix_lr`` are affected.
                if 'fix_lr' in param_group and param_group['fix_lr']:
                    param_group['lr'] = self.lr

    def before_train_epoch(self, runner) -> None:
        """Fix the learning rate of the predictor before each epoch."""
        if self.fix_pred_lr:
            for param_group in runner.optim_wrapper.optimizer.param_groups:
                # Only parameter groups marked with ``fix_lr`` are affected.
                if 'fix_lr' in param_group and param_group['fix_lr']:
                    param_group['lr'] = self.lr
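
# ---------------------------------------------------------------------------
# Usage sketch (editor's illustration, not part of the upstream file). In a
# training config the hook is normally enabled through the standard MMEngine
# ``custom_hooks`` list, e.g. ``custom_hooks = [dict(type='SimSiamHook',
# fix_pred_lr=True, lr=0.05)]``; the ``lr=0.05`` value is an assumed example,
# not taken from a specific config. The snippet below mimics the hook's
# effect with a dummy runner and a torch optimizer whose predictor parameter
# group carries the ``fix_lr=True`` flag the hook looks for.
if __name__ == '__main__':
    from types import SimpleNamespace

    import torch
    from torch import nn

    backbone = nn.Linear(8, 8)
    predictor = nn.Linear(8, 8)
    optimizer = torch.optim.SGD(
        [
            dict(params=backbone.parameters()),
            # Extra keys in a param group are preserved by torch; ``fix_lr``
            # is the marker this hook checks for.
            dict(params=predictor.parameters(), fix_lr=True),
        ],
        lr=0.05)

    # Stand-in for the runner: the hook only touches
    # ``runner.optim_wrapper.optimizer``.
    runner = SimpleNamespace(
        optim_wrapper=SimpleNamespace(optimizer=optimizer))

    hook = SimSiamHook(fix_pred_lr=True, lr=0.05)

    # Pretend a scheduler decayed both groups, then run the hook: only the
    # group marked with ``fix_lr`` is restored to the fixed value.
    for group in optimizer.param_groups:
        group['lr'] = 0.01
    hook.before_train_epoch(runner)
    print([group['lr'] for group in optimizer.param_groups])  # [0.01, 0.05]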