[Fix] Fix inferencer getting wrong configs path (#996)

* [Fix] Fix inferencer getting wrong configs path

* Update CI

* Fix indent

* Fix CI arguments

* GPU test in CI

* Require lint

* Adjust PyTorch version and CUDA version

* Fix docker

* Fix docker syntax

* Use bash -c

* Use bash -c

* Replace is_installed with is_imported

* Fix

* Fix PYTHONPATH
Mashiro 2023-03-14 18:28:33 +08:00 committed by GitHub
parent 789330e2ac
commit ad33a7d0e5
3 changed files with 76 additions and 7 deletions

.circleci/config.yml

@@ -134,6 +134,57 @@ jobs:
           command: |
             docker exec mmengine python -m pytest tests/
+  build_integration_test:
+    parameters:
+      torch:
+        type: string
+      cuda:
+        type: string
+      cudnn:
+        type: integer
+        default: 7
+    machine:
+      image: ubuntu-2004-cuda-11.4:202110-01
+      docker_layer_caching: true
+    resource_class: gpu.nvidia.small
+    steps:
+      - checkout
+      - run:
+          name: Build Docker image
+          command: |
+            docker build .circleci/docker -t mmengine:gpu --build-arg PYTORCH=<< parameters.torch >> --build-arg CUDA=<< parameters.cuda >> --build-arg CUDNN=<< parameters.cudnn >>
+            docker run --gpus all -t -d -v /home/circleci/project:/mmengine -w /mmengine --name mmengine mmengine:gpu
+      - run:
+          name: Build MMEngine from source
+          command: |
+            docker exec mmengine pip install -e . -v
+      - run:
+          name: Install unit test dependencies
+          command: |
+            docker exec mmengine pip install -r requirements/tests.txt
+            docker exec mmengine pip install openmim
+            docker exec mmengine mim install 'mmcv>=2.0.0rc1'
+      - run:
+          name: Install downstream repositories
+          command: |
+            docker exec mmengine mim install 'mmdet>=3.0.0rc0'
+      - run:
+          name: Run integration tests
+          command: |
+            docker exec mmengine pytest tests/test_infer/test_infer.py
+      - run:
+          name: Install downstream repositories from source
+          # TODO: Switch to master branch
+          command: |
+            docker exec mmengine pip uninstall mmdet -y
+            docker exec mmengine apt install git -y
+            docker exec mmengine mkdir downstream_repos
+            docker exec mmengine git clone -b 3.x https://github.com/open-mmlab/mmdetection.git ./downstream_repos/mmdetection
+      - run:
+          name: Run inferencer tests
+          command: |
+            docker exec -e PYTHONPATH=./downstream_repos/mmdetection mmengine pytest tests/test_infer/test_infer.py
 
 workflows:
   pr_stage_lint:
     when: << pipeline.parameters.lint_only >>
@@ -173,10 +224,20 @@ workflows:
           python: 3.9.0
           requires:
             - minimum_version_cpu
+      - hold_integration_test:
+          type: approval
+          requires:
+            - lint
+      - build_integration_test:
+          name: integration_test
+          torch: 1.8.1
+          cuda: "10.2"
+          requires:
+            - hold_integration_test
       - hold:
           type: approval
           requires:
-            - maximum_version_cpu
+            - lint
       - build_cuda:
           name: mainstream_version_gpu
           torch: 1.8.1
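
The integration test job's last two steps carry the point of the commit: mmdet is uninstalled, cloned from source, and the inferencer tests are re-run with `PYTHONPATH=./downstream_repos/mmdetection`, so the downstream package must be importable without ever being pip-installed. A minimal Python sketch of why that works (run from the project root; the clone path matches the CI layout above):

import importlib
import sys

# PYTHONPATH entries are prepended to sys.path at interpreter startup; doing
# it by hand has the same effect. The mmdetection repo root contains the
# `mmdet` package, so the source checkout now satisfies `import mmdet`.
sys.path.insert(0, './downstream_repos/mmdetection')
mmdet = importlib.import_module('mmdet')
print(mmdet.__file__)  # resolves inside downstream_repos/mmdetection/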

mmengine/infer/infer.py

@@ -419,9 +419,9 @@ class BaseInferencer(metaclass=InferencerMeta):
             return repo_dir
         else:
             mim_dir = osp.join(package_path, '.mim')
-            if not osp.exists(osp.join(mim_dir, 'Configs')):
+            if not osp.exists(osp.join(mim_dir, 'configs')):
                 raise FileNotFoundError(
-                    f'Cannot find Configs directory in {package_path}!, '
+                    f'Cannot find `configs` directory in {package_path}!, '
                     f'please check the completeness of the {scope}.')
             return mim_dir
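
The inferencer fix itself is a one-character case change: mim-packaged projects ship their configs under `<package>/.mim/configs` (lowercase), and the existence check is case-sensitive on Linux, so probing for `Configs` raised FileNotFoundError even for complete packages. A standalone sketch of the corrected lookup (the helper name `find_mim_dir` is hypothetical; the real logic lives in the `BaseInferencer` method shown above):

import os.path as osp

def find_mim_dir(package_path: str, scope: str) -> str:
    # Accept the package only if its mim-shipped `configs` directory
    # (lowercase) actually exists next to the package code.
    mim_dir = osp.join(package_path, '.mim')
    if not osp.exists(osp.join(mim_dir, 'configs')):
        raise FileNotFoundError(
            f'Cannot find `configs` directory in {package_path}, '
            f'please check the completeness of the {scope}.')
    return mim_dir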

tests/test_infer/test_infer.py

@@ -10,10 +10,18 @@ import torch
 from mmengine.infer import BaseInferencer
 from mmengine.registry import VISUALIZERS, DefaultScope
 from mmengine.testing import RunnerTestCase
-from mmengine.utils import is_installed, is_list_of
+from mmengine.utils import is_list_of
 from mmengine.visualization import Visualizer
 
 
+def is_imported(package):
+    try:
+        __import__(package)
+        return True
+    except ImportError:
+        return False
+
+
 class ToyInferencer(BaseInferencer):
     preprocess_kwargs = {'pre_arg'}
     forward_kwargs = {'for_arg'}
@@ -98,7 +106,7 @@ class TestBaseInferencer(RunnerTestCase):
             ToyInferencer([self.epoch_based_cfg], self.ckpt_path)
 
         # Pass model as model name defined in metafile
-        if is_installed('mmdet'):
+        if is_imported('mmdet'):
             from mmdet.utils import register_all_modules
             register_all_modules()
@@ -126,7 +134,7 @@ class TestBaseInferencer(RunnerTestCase):
             inferencer(img_paths)
 
     @pytest.mark.skipif(
-        not is_installed('mmdet'), reason='mmdet is not installed')
+        not is_imported('mmdet'), reason='mmdet is not installed')
     def test_load_model_from_meta(self):
         from mmdet.utils import register_all_modules
@@ -210,7 +218,7 @@ class TestBaseInferencer(RunnerTestCase):
         self.assertTrue(is_list_of(data, torch.Tensor))
 
     @pytest.mark.skipif(
-        not is_installed('mmdet'), reason='mmdet is not installed')
+        not is_imported('mmdet'), reason='mmdet is not installed')
     def test_list_models(self):
         model_list = BaseInferencer.list_models('mmdet')
         self.assertTrue(len(model_list) > 0)
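
Finally, the `is_installed` → `is_imported` swap ties the two changes together: a metadata-based check only sees packages that pip actually installed, so the mmdet tests would be skipped in the new CI step where mmdetection is only on PYTHONPATH; probing with a real import covers both cases. A rough sketch of the difference, assuming `importlib.metadata` for the metadata check (mmengine's own `is_installed` may be implemented differently):

import importlib
from importlib import metadata

def is_installed(package: str) -> bool:
    # Metadata check: True only for distributions recorded by an installer.
    try:
        metadata.distribution(package)
        return True
    except metadata.PackageNotFoundError:
        return False

def is_imported(package: str) -> bool:
    # Import check: also True for a source checkout reachable via PYTHONPATH.
    try:
        importlib.import_module(package)
        return True
    except ImportError:
        return False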