mmdeploy/tests/test_apis/test_onnx2openvino.py
Semyon Bevzyuk c5a87fb1bc
[Enhancement]: OpenVINO deployment support for some models (#117)
* Fix include and lib paths for onnxruntime.
* Fixes for SSD export test
* Add onnx2openvino and OpenVINODetector. Test models: ssd, retinanet, fcos, fsaf.
* Add support for two-stage models: faster_rcnn, cascade_rcnn
* Add doc
* Add strip_doc_string for openvino.
* Fix openvino preprocess.
* Add OpenVINO to test_wrapper.py.
* Fix
* Add openvino_execute.
* Removed preprocessing.
* Fix onnxruntime cmake.
* Rewrote postprocessing and forward, added docstrings and fixes.
* Added device type change to OpenVINOWrapper.
* Update forward_of_single_roi_extractor_dynamic_openvino and fix doc.
* Update docs.
* Add OpenVINODetector and onnx2openvino tests.
* Add input_info to onnx2openvino.
* Add TestOpenVINOExporter and test_single_roi_extractor.
* Moved get_input_shape_from_cfg to openvino_utils.py and added test.
* Added test_cascade_roi_head.
* Add backend.check_env() to tests.
* Add OpenVINO to get_rewrite_outputs and to some tests in test_mmdet_models.
* Moved test_single_roi_extractor to test_mmdet_models.
* Removed TestOpenVINOExporter.
2021-10-21 16:07:35 +08:00


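# Tests for the ONNX -> OpenVINO IR conversion utilities in
# mmdeploy.apis.openvino: onnx2openvino, OpenVINOWrapper and
# get_input_shape_from_cfg.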
import os
import os.path as osp
import tempfile

import numpy as np
import pytest
import torch
import torch.nn as nn

from mmdeploy.apis.openvino import is_available

openvino_skip = not is_available()


@pytest.mark.skip(reason='This is not a test class but a utility class.')
class TestModel(nn.Module):
    """Simple model used by the ONNX export and conversion tests."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        return x * 0.5


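# generate_onnx_file exports the model with dynamic batch/height/width axes,
# so the ONNX graph is not fixed to the export resolution; test_onnx2openvino
# below relies on this by exporting with an 8x8 input and running the
# converted model on a 16x16 input.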
def generate_onnx_file(model, export_img, onnx_file):
    """Export `model` to an ONNX file with dynamic batch and spatial axes."""
    with torch.no_grad():
        dynamic_axes = {
            'input': {
                0: 'batch',
                2: 'height',
                3: 'width'
            },
            'output': {
                0: 'batch'
            }
        }
        torch.onnx.export(
            model,
            export_img,
            onnx_file,
            output_names=['output'],
            input_names=['input'],
            keep_initializers_as_inputs=True,
            do_constant_folding=True,
            verbose=False,
            opset_version=11,
            dynamic_axes=dynamic_axes)
        assert osp.exists(onnx_file)


def get_outputs(pytorch_model, openvino_model_path, input_tensor):
    """Run the input through PyTorch and OpenVINO, return both outputs."""
    output_pytorch = pytorch_model(input_tensor).numpy()

    from mmdeploy.apis.openvino import OpenVINOWrapper
    openvino_model = OpenVINOWrapper(openvino_model_path)
    openvino_output = openvino_model({'input': input_tensor})['output']

    return output_pytorch, openvino_output


@pytest.mark.skipif(openvino_skip, reason='OpenVINO not available')
def test_onnx2openvino():
    from mmdeploy.apis.openvino import get_output_model_file, onnx2openvino
    pytorch_model = TestModel().eval()
    export_img = torch.rand([1, 3, 8, 8])
    # Only the generated temporary paths are used below; the temporary
    # file/directory objects themselves are not kept around.
    onnx_file = tempfile.NamedTemporaryFile(suffix='.onnx').name
    generate_onnx_file(pytorch_model, export_img, onnx_file)

    input_info = {'input': export_img.shape}
    output_names = ['output']
    openvino_dir = tempfile.TemporaryDirectory().name
    onnx2openvino(input_info, output_names, onnx_file, openvino_dir)
    openvino_model_path = get_output_model_file(onnx_file, openvino_dir)
    assert osp.exists(openvino_model_path), \
        'The file (.xml) for OpenVINO IR has not been created.'

    test_img = torch.rand([1, 3, 16, 16])
    output_pytorch, openvino_output = get_outputs(pytorch_model,
                                                  openvino_model_path,
                                                  test_img)
    assert np.allclose(output_pytorch, openvino_output), \
        'OpenVINO and PyTorch outputs are not the same.'


@pytest.mark.skipif(openvino_skip, reason='OpenVINO not available')
def test_can_not_run_onnx2openvino_without_mo():
    # Clear the environment so that the OpenVINO Model Optimizer (MO)
    # cannot be found, and check that onnx2openvino raises RuntimeError.
    current_environ = dict(os.environ)
    os.environ.clear()
    is_error = False
    try:
        from mmdeploy.apis.openvino import onnx2openvino
        onnx2openvino({}, ['output'], 'tmp.onnx', '/tmp')
    except RuntimeError:
        is_error = True
    finally:
        # Restore the environment even if an unexpected exception is raised.
        os.environ.update(current_environ)
    assert is_error, \
        'The onnx2openvino script was launched without checking for MO.'


@pytest.mark.skipif(openvino_skip, reason='OpenVINO not available')
def test_get_input_shape_from_cfg():
    from mmdeploy.apis.openvino import get_input_shape_from_cfg

    # Test with the default value.
    model_cfg = {}
    input_shape = get_input_shape_from_cfg(model_cfg)
    assert input_shape == [1, 3, 800, 1344], \
        'The function returned a different default shape.'

    # Test with a config that contains the required data.
    height, width = 800, 1200
    model_cfg = {'test_pipeline': [{}, {'img_scale': (width, height)}]}
    input_shape = get_input_shape_from_cfg(model_cfg)
    assert input_shape == [1, 3, height, width], \
        'The shape in the config does not match the output shape.'