[Enhancement] Upgrade isort in pre-commit hook (#44)
* [Docs] update batch size
* [Enhancement] Upgrade isort in pre-commit hook
* update mmdet version
* update mmcls version
* update ci

parent 1ad6ff7dca
commit 839115f222
@@ -3,12 +3,8 @@ repos:
     rev: 3.8.3
     hooks:
       - id: flake8
-  - repo: https://github.com/asottile/seed-isort-config
-    rev: v2.2.0
-    hooks:
-      - id: seed-isort-config
-  - repo: https://github.com/timothycrosley/isort
-    rev: 4.3.21
+  - repo: https://github.com/PyCQA/isort
+    rev: 5.10.1
     hooks:
       - id: isort
   - repo: https://github.com/pre-commit/mirrors-yapf
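The seed-isort-config hook can be dropped because isort 5 classifies first- and third-party imports on its own, so only the PyCQA/isort hook remains; the hooks can be re-run locally with `pre-commit run isort --all-files`. A minimal sketch (assuming isort>=5.10.1 is installed) of the wrapping style the new version enforces, using a hypothetical over-length import built from API names that appear later in this commit:

import isort

# Hypothetical over-length import line, built from names touched in this commit.
messy = ('from mmfewshot.detection.apis import multi_gpu_model_init, '
         'multi_gpu_test, single_gpu_model_init, single_gpu_test\n')

# With line_length=79 (grid output is isort's default and is also what the
# repo's setup.cfg pins), isort 5 rewrites this into the parenthesized,
# vertically aligned form adopted by the eval-hook hunks below, rather than
# a backslash continuation.
print(isort.code(messy, line_length=79))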
@@ -32,7 +32,7 @@ assert (digit_version(mmcv_minimum_version) <= mmcv_version
     f'Please install mmcv>={mmcv_minimum_version}, <={mmcv_maximum_version}.'

 mmdet_minimum_version = '2.16.0'
-mmdet_maximum_version = '2.20.0'
+mmdet_maximum_version = '2.21.0'
 mmdet_version = digit_version(mmdet.__version__)

@@ -43,7 +43,7 @@ assert (digit_version(mmdet_minimum_version) <= mmdet_version
     <={mmdet_maximum_version}.'

 mmcls_minimum_version = '0.15.0'
-mmcls_maximum_version = '0.19.0'
+mmcls_maximum_version = '0.21.0'
 mmcls_version = digit_version(mmcls.__version__)

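Both bumps feed the same guard pattern: the installed package's version string is converted into a comparable tuple and asserted to lie inside the [minimum, maximum] window. A condensed sketch of the mmdet check, assuming digit_version is the mmcv helper as is conventional in OpenMMLab projects (the mmcls check has the same shape with 0.15.0 and 0.21.0):

import mmdet
from mmcv import digit_version  # assumed source of the helper used above

mmdet_minimum_version = '2.16.0'
mmdet_maximum_version = '2.21.0'  # raised from 2.20.0 by this commit
mmdet_version = digit_version(mmdet.__version__)

# Reject mmdet releases outside the supported window.
assert (digit_version(mmdet_minimum_version) <= mmdet_version
        <= digit_version(mmdet_maximum_version)), \
    f'Please install mmdet>={mmdet_minimum_version}, <={mmdet_maximum_version}.'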
@@ -39,8 +39,9 @@ class QuerySupportEvalHook(BaseEvalHook):
         # identified the 'train', 'val' and 'model_init' stages instead
         # of `return_loss` in mmdet. Thus, `single_gpu_test` should be
         # imported from mmfewshot.
-        from mmfewshot.detection.apis import \
-            (single_gpu_model_init, single_gpu_test)
+        from mmfewshot.detection.apis import (single_gpu_model_init,
+                                              single_gpu_test)
+
         # `single_gpu_model_init` extracts features from
         # `model_init_dataloader` for model initialization with single gpu.
         single_gpu_model_init(runner.model, self.model_init_dataloader)
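For context, the comments above describe the order the hook follows during validation: support features are extracted first, then inference runs on the evaluation dataloader. A condensed sketch of that ordering; the single_gpu_test keyword below assumes a mmdet-style signature and is not copied from the file:

from mmfewshot.detection.apis import single_gpu_model_init, single_gpu_test


def query_support_evaluate(runner, model_init_dataloader, val_dataloader):
    """Condensed sketch of the order QuerySupportEvalHook follows."""
    # 1. Extract support features from the model-init dataloader so the
    #    detector is (re)initialized before validation.
    single_gpu_model_init(runner.model, model_init_dataloader)

    # 2. Run inference on the evaluation dataloader; `show=False` assumes a
    #    mmdet-style signature.
    results = single_gpu_test(runner.model, val_dataloader, show=False)
    return results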
@@ -102,8 +103,9 @@ class QuerySupportDistEvalHook(BaseDistEvalHook):
         # identified the 'train', 'val' and 'model_init' stages instead
         # of `return_loss` in mmdet. Thus, `multi_gpu_test` should be
         # imported from mmfewshot.
-        from mmfewshot.detection.apis import \
-            (multi_gpu_model_init, multi_gpu_test)
+        from mmfewshot.detection.apis import (multi_gpu_model_init,
+                                              multi_gpu_test)
+
         # Noted that `model_init_dataloader` should NOT use distributed
         # sampler to make all the models on different gpus get same data
         # results in the same initialized models.
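The "should NOT use distributed sampler" comment is the key constraint here: a distributed sampler would shard the model-init data across ranks, so each GPU would initialize its model copy from different support samples. A small self-contained illustration of the difference, using plain PyTorch and a hypothetical eight-item dataset:

import torch
from torch.utils.data import DistributedSampler, SequentialSampler, TensorDataset

# Hypothetical support set of 8 items, only to show which indices a rank sees.
dataset = TensorDataset(torch.arange(8))

# Non-distributed sampling: every rank iterates over all indices, so all model
# copies are initialized from identical data.
print(list(SequentialSampler(dataset)))  # [0, 1, 2, 3, 4, 5, 6, 7]

# Distributed sampling: rank 0 of 2 only sees half of the indices, which is
# exactly what the comment above warns against for `model_init_dataloader`.
print(list(DistributedSampler(dataset, num_replicas=2, rank=0, shuffle=False)))  # [0, 2, 4, 6]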
@@ -189,6 +189,7 @@ def build_dataloader(dataset: Dataset,
         seed=seed) if seed is not None else None
     if isinstance(dataset, QueryAwareDataset):
         from mmfewshot.utils import multi_pipeline_collate_fn
+
         # `QueryAwareDataset` will return a list of DataContainer
         # `multi_pipeline_collate_fn` are designed to handle
         # the data with list[list[DataContainer]]
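The reason for pulling in multi_pipeline_collate_fn is spelled out in the comment: a QueryAwareDataset item is a list of DataContainer per pipeline, so the default collate cannot stack a batch of them. A hedged sketch of how the builder wires the collate function into the DataLoader; the samples_per_gpu keyword is assumed to mirror mmcv.parallel.collate, and the dataset is left as a placeholder because building one needs a full detection config:

from functools import partial

from torch.utils.data import DataLoader

from mmfewshot.utils import multi_pipeline_collate_fn

dataset = ...          # placeholder for a configured QueryAwareDataset
samples_per_gpu = 2    # hypothetical value, normally read from the config

data_loader = DataLoader(
    dataset,
    batch_size=samples_per_gpu,
    num_workers=2,
    # Each batch element is list[list[DataContainer]]; this collate flattens
    # and stacks the pipeline branches instead of the default collate.
    collate_fn=partial(multi_pipeline_collate_fn,
                       samples_per_gpu=samples_per_gpu),
    pin_memory=False)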
@@ -203,12 +204,12 @@ def build_dataloader(dataset: Dataset,
             worker_init_fn=init_fn,
             **kwargs)
     elif isinstance(dataset, NWayKShotDataset):
-        from .dataloader_wrappers import NWayKShotDataloader
         from mmfewshot.utils import multi_pipeline_collate_fn
+        from .dataloader_wrappers import NWayKShotDataloader

         # `NWayKShotDataset` will return a list of DataContainer
         # `multi_pipeline_collate_fn` are designed to handle
         # the data with list[list[DataContainer]]

         # initialize query dataloader
         query_data_loader = DataLoader(
             dataset,
@@ -260,12 +261,12 @@ def build_dataloader(dataset: Dataset,
             query_data_loader=query_data_loader,
             support_data_loader=support_data_loader)
     elif isinstance(dataset, TwoBranchDataset):
-        from .dataloader_wrappers import TwoBranchDataloader
         from mmfewshot.utils import multi_pipeline_collate_fn
+        from .dataloader_wrappers import TwoBranchDataloader

         # `TwoBranchDataset` will return a list of DataContainer
         # `multi_pipeline_collate_fn` are designed to handle
         # the data with list[list[DataContainer]]

         # initialize main dataloader
         main_data_loader = DataLoader(
             dataset,
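The NWayKShotDataset and TwoBranchDataset branches end the same way: two plain DataLoaders are built and then paired by a thin wrapper so each training iteration yields one batch from each. A hedged sketch for the N-way K-shot case, with the two loaders left as placeholders (their full construction uses the collate_fn, sampler and seeding shown above):

from torch.utils.data import DataLoader

# Absolute form of the relative import above; the wrapper sits next to builder.py.
from mmfewshot.detection.datasets.dataloader_wrappers import NWayKShotDataloader

query_data_loader: DataLoader = ...    # placeholder, built over the query pipeline
support_data_loader: DataLoader = ...  # placeholder, built over the support pipeline

# Pair the two loaders, matching the keyword arguments shown in the hunk above.
data_loader = NWayKShotDataloader(
    query_data_loader=query_data_loader,
    support_data_loader=support_data_loader)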
@@ -1,7 +1,7 @@
 [isort]
 line_length = 79
 multi_line_output = 0
-known_standard_library = setuptools
+extra_standard_library = setuptools
 known_first_party = mmfewshot
 known_third_party = cv2,mmcls,mmcv,mmdet,numpy,pytest,pytorch_sphinx_theme,terminaltables,torch,typing_extensions
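The rename matters because, in isort 5, known_standard_library replaces the built-in standard-library list outright, while extra_standard_library appends to it; keeping the old key would have caused real stdlib modules such as os to be sorted as third party. A small sketch of the equivalent programmatic configuration (assuming isort>=5):

import isort

# Programmatic equivalent of the [isort] section above.
cfg = isort.Config(
    line_length=79,
    extra_standard_library=['setuptools'],  # extends, rather than replaces, the stdlib list
    known_first_party=['mmfewshot'],
)

snippet = 'import mmcv\nimport setuptools\nimport mmfewshot\nimport os\n'

# Expected grouping: stdlib (os, setuptools), third party (mmcv),
# then first party (mmfewshot), each section separated by a blank line.
print(isort.code(snippet, config=cfg))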