# Copyright (c) OpenMMLab. All rights reserved.
import numpy as np
import pytest
import torch

from mmdeploy.codebase import import_codebase
from mmdeploy.utils import Backend, Codebase
from mmdeploy.utils.test import WrapModel, check_backend, get_rewrite_outputs

try:
    import_codebase(Codebase.MMPOSE)
except ImportError:
    pytest.skip(
        f'{Codebase.MMPOSE} is not installed.', allow_module_level=True)

from .utils import generate_mmpose_deploy_config  # noqa: E402
from .utils import generate_mmpose_task_processor  # noqa: E402


def get_heatmap_head():
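    """Build a tiny HeatmapHead (in_channels=2, out_channels=4) for the
    rewrite tests; gradients are disabled since only forward is traced."""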
    from mmpose.models.heads import HeatmapHead

    model = HeatmapHead(
        2,
        4,
        deconv_out_channels=(16, 16, 16),
        loss=dict(type='KeypointMSELoss', use_target_weight=False))
    model.requires_grad_(False)
    return model


@pytest.mark.parametrize('backend_type', [Backend.ONNXRUNTIME])
def test_heatmaphead_forward(backend_type: Backend):
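    """The rewritten HeatmapHead.forward should export a single tensor."""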
    check_backend(backend_type, True)
    model = get_heatmap_head()
    model.cpu().eval()
    deploy_cfg = generate_mmpose_deploy_config(backend_type.value)
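    # one feature level; 2 channels matches the head's in_channels above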
    feats = [torch.rand(1, 2, 32, 48)]
    wrapped_model = WrapModel(model, 'forward')
    rewrite_inputs = {'feats': feats}
    rewrite_outputs, _ = get_rewrite_outputs(
        wrapped_model=wrapped_model,
        model_inputs=rewrite_inputs,
        deploy_cfg=deploy_cfg,
        run_with_backend=False)
    assert isinstance(rewrite_outputs, torch.Tensor)


def get_msmu_head():
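    """Build a single-stage, single-unit MSPNHead for the rewrite tests."""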
    from mmpose.models.heads import MSPNHead

    model = MSPNHead(
        num_stages=1,
        num_units=1,
        out_shape=(32, 48),
        unit_channels=16,
        level_indices=[1])
    model.requires_grad_(False)
    return model


@pytest.mark.parametrize('backend_type', [Backend.ONNXRUNTIME])
def test_msmuhead_forward(backend_type: Backend):
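    """The rewritten MSPNHead.forward should export a single tensor."""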
    check_backend(backend_type, True)
    model = get_msmu_head()
    model.cpu().eval()
    deploy_cfg = generate_mmpose_deploy_config(backend_type.value)
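    # nested input: one stage holding one unit, matching the head config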
    feats = [[torch.rand(1, 16, 32, 48)]]
    wrapped_model = WrapModel(model, 'forward')
    rewrite_inputs = {'feats': feats}
    rewrite_outputs, _ = get_rewrite_outputs(
        wrapped_model=wrapped_model,
        model_inputs=rewrite_inputs,
        deploy_cfg=deploy_cfg,
        run_with_backend=False)
    assert isinstance(rewrite_outputs, torch.Tensor)


def get_cross_resolution_weighting_model():
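    """Wrap CrossResolutionWeighting so it can be traced from one tensor."""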
    from mmpose.models.backbones.litehrnet import CrossResolutionWeighting

    class DummyModel(torch.nn.Module):
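        """Feed the same tensor to both branches of CrossResolutionWeighting."""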
        def __init__(self):
            super().__init__()
            self.model = CrossResolutionWeighting([16, 16], ratio=8)

        def forward(self, x):
            assert isinstance(x, torch.Tensor)
            return self.model([x, x])

    model = DummyModel()
    model.requires_grad_(False)
    return model


@pytest.mark.parametrize('backend_type', [Backend.ONNXRUNTIME])
def test_cross_resolution_weighting_forward(backend_type: Backend):
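    """Rewritten CrossResolutionWeighting should match PyTorch numerically."""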
    check_backend(backend_type, True)
    model = get_cross_resolution_weighting_model()
    model.cpu().eval()
    imgs = torch.rand(1, 16, 16, 16)
    deploy_cfg = generate_mmpose_deploy_config(backend_type.value)
    rewrite_inputs = {'x': imgs}
    model_outputs = model.forward(imgs)
    wrapped_model = WrapModel(model, 'forward')
    rewrite_outputs, is_backend_output = get_rewrite_outputs(
        wrapped_model=wrapped_model,
        model_inputs=rewrite_inputs,
        deploy_cfg=deploy_cfg)
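    # when run on a backend, outputs may come back as a dict keyed by name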
    if isinstance(rewrite_outputs, dict):
        rewrite_outputs = rewrite_outputs['output']
    for model_output, rewrite_output in zip(model_outputs, rewrite_outputs):
        model_output = model_output.cpu().numpy()
        if isinstance(rewrite_output, torch.Tensor):
            rewrite_output = rewrite_output.detach().cpu().numpy()
        assert np.allclose(
            model_output, rewrite_output, rtol=1e-03, atol=1e-05)


@pytest.mark.parametrize('backend_type', [Backend.ONNXRUNTIME])
def test_estimator_forward(backend_type: Backend):
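    """Build the full estimator via the task processor and check that the
    rewritten forward exports a single tensor."""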
    check_backend(backend_type, True)
    deploy_cfg = generate_mmpose_deploy_config(backend_type.value)
    task_processor = generate_mmpose_task_processor(deploy_cfg=deploy_cfg)
    model = task_processor.build_pytorch_model()
    model.requires_grad_(False)
    model.cpu().eval()
    wrapped_model = WrapModel(model, 'forward', data_samples=None)
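    # (1, 3, 256, 192) is a typical top-down pose input (batch, RGB, H, W)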
    rewrite_inputs = {'inputs': torch.rand(1, 3, 256, 192)}
    rewrite_outputs, _ = get_rewrite_outputs(
        wrapped_model=wrapped_model,
        model_inputs=rewrite_inputs,
        run_with_backend=False,
        deploy_cfg=deploy_cfg)
    assert isinstance(rewrite_outputs, torch.Tensor)