Update GitHub Badge for Workflow ()

* add badge to build.yml

* add rest badges

* update badge in readme

* fix lint

* use html table

* test font size

* update table

* fix lint

* create badge in step

* fix windows badge

* check table

* make badge and text in one line
pull/2095/head
huayuan4396 2023-05-18 14:56:02 +08:00 committed by GitHub
parent 3b62b21fac
commit 8e658cd1bf
5 changed files with 461 additions and 25 deletions
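
The workflow diffs below all follow one pattern: the step whose result should drive the badge is given an `id` (always `badge_status` here), and a follow-up `create badge` step runs with `if: always()` so the badge is refreshed whether that step passed or failed. The badge is written by the `RubbaBoy/BYOB` action and is later embedded in the README tables from URLs of the form `https://byob.yarr.is/<owner>/<repo>/<NAME>`. A minimal sketch of the pattern (the workflow, job name, and build command are illustrative, not taken from the diff):

```yaml
# Hypothetical stand-alone workflow illustrating the badge pattern used in this PR.
name: badge-pattern-example
on: workflow_dispatch

jobs:
  build_example:                    # illustrative job name
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
      # The step whose conclusion drives the badge; its id is referenced below.
      - name: Build and test
        id: badge_status
        run: |
          echo "build commands go here"
      # Runs even if the step above failed, so a red badge is published too.
      - name: create badge
        if: always()
        uses: RubbaBoy/BYOB@v1.2.1
        with:
          NAME: build_example       # served at https://byob.yarr.is/<owner>/<repo>/build_example
          LABEL: 'build'
          STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
          COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```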


@@ -58,6 +58,7 @@ jobs:
echo $(pwd)
ln -s build/bin/mmdeploy_onnx2ncnn ./
python .github/scripts/test_onnx2ncnn.py --run 1
build_ncnn:
runs-on: ubuntu-20.04
strategy:


@@ -42,11 +42,22 @@ jobs:
-Dpplnn_DIR=${pplnn_DIR}
ls build/lib
- name: Install mmdeploy with pplnn
id: badge_status
run: |
rm -rf .eggs && python3 -m pip install -e .
export LD_LIBRARY_PATH="/root/workspace/mmdeploy/build/lib:${LD_LIBRARY_PATH}"
python3 tools/check_env.py
python3 -c 'import mmdeploy.apis.pplnn as pplnn_api; assert pplnn_api.is_available()'
- name: create badge
if: always()
uses: RubbaBoy/BYOB@v1.2.1
with:
NAME: build_pplnn_cuda
LABEL: 'build'
STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
build_pplnn_cpu:
runs-on: ubuntu-20.04
@@ -56,8 +67,18 @@ jobs:
with:
submodules: 'recursive'
- name: Install mmdeploy with pplnn
id: badge_status
run: |
python -m pip install torch==1.8.2 torchvision==0.9.2 --extra-index-url https://download.pytorch.org/whl/lts/1.8/cpu
python -m pip install mmcv-lite protobuf==3.20.2
python tools/scripts/build_ubuntu_x64_pplnn.py 8
python -c 'import mmdeploy.apis.pplnn as pplnn_api; assert pplnn_api.is_available()'
- name: create badge
if: always()
uses: RubbaBoy/BYOB@v1.2.1
with:
NAME: build_pplnn_cpu
LABEL: 'build'
STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
COLOR: ${{ steps.badge_status.conclusion == 'success' && 'blue' || 'red' }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
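
The `STATUS` and `COLOR` inputs rely on the common GitHub Actions expression idiom `${{ cond && a || b }}`, which acts as a ternary: it yields `a` when `cond` is true and `b` otherwise, with the usual caveat that it falls back to `b` whenever `a` itself is falsy (for example an empty string); that is not an issue here because `'passing'` and `'green'` are non-empty. A small self-contained sketch, using a hypothetical workflow and step id:

```yaml
# Hypothetical workflow; only demonstrates the && / || expression idiom.
name: expression-demo
on: workflow_dispatch

jobs:
  demo:
    runs-on: ubuntu-20.04
    steps:
      - name: probe
        id: probe
        run: exit 0                 # change to `exit 1` to exercise the failing branch
      - name: report
        if: always()                # still runs when the probe step fails
        run: |
          echo "status=${{ steps.probe.conclusion == 'success' && 'passing' || 'failing' }}"
          echo "color=${{ steps.probe.conclusion == 'success' && 'green' || 'red' }}"
```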


@@ -64,10 +64,20 @@ jobs:
coverage xml
coverage report -m
- name: Run mmyolo deploy unittests
id: badge_status
run: |
python -m pip install xdoctest
cd /home/runner/work/mmyolo
pytest tests/test_deploy
- name: create badge
if: always()
uses: RubbaBoy/BYOB@v1.2.1
with:
NAME: build_cpu_model_convert
LABEL: 'build'
STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
build_cpu_sdk:
runs-on: ubuntu-20.04
@@ -85,6 +95,7 @@ jobs:
sudo apt install -y ffmpeg libsm6 libxext6 git ninja-build libglib2.0-0 libxrender-dev libc++1-9 libc++abi1-9
sudo apt install libopencv-dev lcov wget
- name: Build and run SDK unit test without backend
id: badge_status
run: |
mkdir -p build && pushd build
cmake .. -DCMAKE_CXX_COMPILER=g++ -DMMDEPLOY_CODEBASES=all -DMMDEPLOY_BUILD_SDK=ON -DMMDEPLOY_BUILD_SDK_PYTHON_API=OFF -DMMDEPLOY_TARGET_DEVICES=cpu -DMMDEPLOY_COVERAGE=ON -DMMDEPLOY_BUILD_TEST=ON
@@ -95,6 +106,15 @@ jobs:
lcov --capture --directory . --output-file coverage.info
ls -lah coverage.info
cp coverage.info ../
- name: create badge
if: always()
uses: RubbaBoy/BYOB@v1.2.1
with:
NAME: build_cpu_sdk
LABEL: 'build'
STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
cross_build_aarch64:
runs-on: ubuntu-20.04
@@ -110,8 +130,18 @@ jobs:
with:
python-version: 3.8
- name: gcc-multilib
id: badge_status
run: |
sh -ex tools/scripts/ubuntu_cross_build_aarch64.sh
- name: create badge
if: always()
uses: RubbaBoy/BYOB@v1.2.1
with:
NAME: cross_build_aarch64
LABEL: 'build'
STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
build_cuda102:
runs-on: ubuntu-20.04
@@ -155,10 +185,20 @@ jobs:
rm -rf .eggs && python -m pip install -e .
python tools/check_env.py
- name: Run unittests and generate coverage report
id: badge_status
run: |
coverage run --branch --source mmdeploy -m pytest -rsE tests
coverage xml
coverage report -m
- name: create badge
if: always()
uses: RubbaBoy/BYOB@v1.2.1
with:
NAME: build_cuda102
LABEL: 'build'
STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
build_cuda113:
runs-on: ubuntu-20.04
@@ -205,6 +245,7 @@ jobs:
coverage xml
coverage report -m
- name: Upload coverage to Codecov
id: badge_status
uses: codecov/codecov-action@v2
with:
file: ./coverage.xml,./coverage.info
@@ -212,6 +253,15 @@ jobs:
env_vars: OS,PYTHON,CPLUS
name: codecov-umbrella
fail_ci_if_error: false
- name: create badge
if: always()
uses: RubbaBoy/BYOB@v1.2.1
with:
NAME: build_cuda113
LABEL: 'build'
STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
build_cuda113_linux:
runs-on: [self-hosted, linux-3090]
@@ -251,10 +301,20 @@ jobs:
export LD_LIBRARY_PATH="/root/workspace/mmdeploy/build/lib:${LD_LIBRARY_PATH}"
python3 tools/check_env.py
- name: Test TensorRT pipeline
id: badge_status
run: |
export LD_LIBRARY_PATH="/root/workspace/mmdeploy/build/lib:${LD_LIBRARY_PATH}"
export LD_LIBRARY_PATH="/root/workspace/mmdeploy/mmdeploy/lib:${LD_LIBRARY_PATH}"
bash .github/scripts/linux/test_full_pipeline.sh trt cuda
- name: create badge
if: always()
uses: RubbaBoy/BYOB@v1.2.1
with:
NAME: build_cuda113_linux
LABEL: 'build'
STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
build_cuda113_windows:
runs-on: [self-hosted, win10-3080]
@@ -316,3 +376,17 @@ jobs:
conda activate $pwd\tmp_env
$env:path = "$pwd\build\bin\Release;" + $env:path
.github\scripts\windows\test_full_pipeline.ps1 -Backend trt -Device cuda
badge_build_cuda113_windows:
needs: build_cuda113_windows
if: always()
runs-on: ubuntu-20.04
steps:
- name: create badge
uses: RubbaBoy/BYOB@v1.2.1
with:
NAME: build_cuda113_windows
LABEL: 'build'
STATUS: ${{ needs.build_cuda113_windows.result == 'success' && 'passing' || needs.build_cuda113_windows.result }}
COLOR: ${{ needs.build_cuda113_windows.result == 'success' && 'green' || 'red' }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
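
The README tables that follow mix two kinds of badge URLs: `img.shields.io/github/actions/workflow/status/...` images, which report the status of an entire workflow file, and `byob.yarr.is/open-mmlab/mmdeploy/<NAME>` images, which show the per-job status published by the `create badge` steps added above. Side by side, the two forms used in the tables look roughly like this:

```html
<!-- whole-workflow status badge, rendered by shields.io from the Actions API -->
<a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ort.yml">
  <img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ort.yml">
</a>

<!-- per-job status badge, published to byob.yarr.is by the BYOB steps above -->
<a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml">
  <img src="https://byob.yarr.is/open-mmlab/mmdeploy/build_cuda113_linux">
</a>
```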

README.md

@@ -83,18 +83,188 @@ The supported Device-Platform-InferenceBackend matrix is presented as following,
The benchmark can be found from [here](docs/en/03-benchmark/benchmark.md)
| Device / Platform | Linux | Windows | macOS | Android |
| ----------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| x86_64 CPU | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ort.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ort.yml)ONNXRuntime<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-pplnn.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-pplnn.yml)pplnn<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ncnn.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ncnn.yml)ncnn<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-torchscript.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-torchscript.yml)LibTorch<br>![](https://img.shields.io/badge/build-no%20status-lightgrey)OpenVINO<br>![](https://img.shields.io/badge/build-no%20status-lightgrey)TVM<br> | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/build.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ONNXRuntime<br>![](https://img.shields.io/badge/build-no%20status-lightgrey)OpenVINO<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/build.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ncnn<br> | - | - |
| ARM CPU | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/build.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ncnn<br> | - | - | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-rknn.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-rknn.yml)ncnn<br> |
| RISC-V | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/linux-riscv64-gcc.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/linux-riscv64-gcc.yml)ncnn<br> | - | - | - |
| NVIDIA GPU | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/build.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ONNXRuntime<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/build.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)TensorRT<br>![](https://img.shields.io/badge/build-no%20status-lightgrey)LibTorch<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-pplnn.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-pplnn.yml)pplnn<br> | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/build.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ONNXRuntime<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/build.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)TensorRT<br> | - | - |
| NVIDIA Jetson | ![](https://img.shields.io/badge/build-no%20status-lightgrey)TensorRT<br> | - | - | - |
| Huawei ascend310 | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ascend.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ascend.yml)CANN<br> | - | - | - |
| Rockchip | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-rknn.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-rknn.yml)RKNN<br> | - | - | - |
| Apple M1 | - | - | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-coreml.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-coreml.yml)CoreML<br> | - |
| Adreno GPU | - | - | - | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-snpe.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-snpe.yml)SNPE<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-rknn.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-rknn.yml)ncnn<br> |
| Hexagon DSP | - | - | - | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-snpe.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-snpe.yml)SNPE<br> |
<div style="width: fit-content; margin: auto;">
<table>
<tr>
<th>Device / <br> Platform</th>
<th>Linux</th>
<th>Windows</th>
<th>macOS</th>
<th>Android</th>
</tr>
<tr>
<th>x86_64 <br> CPU</th>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ort.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ort.yml"></a></sub> <sub>onnxruntime</sub> <br>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-pplnn.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-pplnn.yml"></a></sub> <sub>pplnn</sub> <br>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ncnn.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ncnn.yml"></a></sub> <sub>ncnn</sub> <br>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-torchscript.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-torchscript.yml"></a></sub> <sub>LibTorch</sub> <br>
<sub><img src="https://img.shields.io/badge/build-no%20status-lightgrey"></sub> <sub>OpenVINO</sub> <br>
<sub><img src="https://img.shields.io/badge/build-no%20status-lightgrey"></sub> <sub>TVM</sub> <br>
</td>
<td>
<sub><img src="https://img.shields.io/badge/build-no%20status-lightgrey"></sub> <sub>onnxruntime</sub> <br>
<sub><img src="https://img.shields.io/badge/build-no%20status-lightgrey"></sub> <sub>OpenVINO</sub> <br>
<sub><img src="https://img.shields.io/badge/build-no%20status-lightgrey"></sub> <sub>ncnn</sub> <br>
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
</tr>
<tr>
<th>ARM <br> CPU</th>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml"><img src="https://byob.yarr.is/open-mmlab/mmdeploy/cross_build_aarch64"></a></sub> <sub>ncnn</sub> <br>
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td align="center">
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ncnn.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ncnn.yml"></a></sub> <sub>ncnn</sub> <br>
</td>
</tr>
<tr>
<th>RISC-V</th>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/linux-riscv64-gcc.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/linux-riscv64-gcc.yml"></a></sub> <sub>ncnn</sub> <br>
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
</tr>
<tr>
<th>NVIDIA <br> GPU</th>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml"><img src="https://byob.yarr.is/open-mmlab/mmdeploy/build_cuda113_linux"></a></sub> <sub>onnxruntime</sub> <br>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml"><img src="https://byob.yarr.is/open-mmlab/mmdeploy/build_cuda113_linux"></a></sub> <sub>TensorRT</sub> <br>
<sub><img src="https://img.shields.io/badge/build-no%20status-lightgrey"></sub> <sub>LibTorch</sub> <br>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-pplnn.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-pplnn.yml"></a></sub> <sub>pplnn</sub> <br>
</td>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml"><img src="https://byob.yarr.is/open-mmlab/mmdeploy/build_cuda113_windows"></a></sub> <sub>onnxruntime</sub> <br>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml"><img src="https://byob.yarr.is/open-mmlab/mmdeploy/build_cuda113_windows"></a></sub> <sub>TensorRT</sub> <br>
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
</tr>
<tr>
<th>NVIDIA <br> Jetson</th>
<td>
<sub><img src="https://img.shields.io/badge/build-no%20status-lightgrey"></sub> <sub>TensorRT</sub> <br>
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
</tr>
<tr>
<th>Huawei <br> ascend310</th>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ascend.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ascend.yml"></a></sub> <sub>CANN</sub> <br>
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
</tr>
<tr>
<th>Rockchip</th>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-rknn.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-rknn.yml"></a></sub> <sub>RKNN</sub> <br>
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
</tr>
<tr>
<th>Apple M1</th>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-coreml.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-coreml.yml"></a></sub> <sub>CoreML</sub> <br>
</td>
<td align="center">
-
</td>
</tr>
<tr>
<th>Adreno <br> GPU</th>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-snpe.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-snpe.yml"></a></sub> <sub>SNPE</sub> <br>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ncnn.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ncnn.yml"></a></sub> <sub>ncnn</sub> <br>
</td>
</tr>
<tr>
<th>Hexagon <br> DSP</th>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-snpe.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-snpe.yml"></a></sub> <sub>SNPE</sub> <br>
</td>
</tr>
</table>
</div>
### Efficient and scalable C/C++ SDK Framework
@@ -130,7 +300,7 @@ Please read [getting_started](docs/en/get_started.md) for the basic usage of MMD
- [How to do regression test](docs/en/07-developer-guide/regression_test.md)
- Custom Backend Ops
- [ncnn](docs/en/06-custom-ops/ncnn.md)
- [onnxruntime](docs/en/06-custom-ops/onnxruntime.md)
- [ONNXRuntime](docs/en/06-custom-ops/onnxruntime.md)
- [tensorrt](docs/en/06-custom-ops/tensorrt.md)
- [FAQ](docs/en/faq.md)
- [Contributing](.github/CONTRIBUTING.md)
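
The badge matrix above (mirrored in the Chinese README below) replaces the original Markdown table with raw HTML so that each cell can stack several badge/backend pairs: the outer `<div style="width: fit-content; margin: auto;">` is intended to center the table, and wrapping both the badge image and its label in `<sub>` keeps them small enough to share a single line, which is what the "make badge and text in one line" and "test font size" commits were tuning. A stripped-down sketch of one row, with a single example cell:

```html
<div style="width: fit-content; margin: auto;">  <!-- intended to center the table -->
  <table>
    <tr>
      <th>Device / <br> Platform</th>
      <th>Linux</th>
    </tr>
    <tr>
      <th>x86_64 <br> CPU</th>
      <td>
        <!-- <sub> shrinks both the badge and its label so they stay on one line -->
        <sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ort.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ort.yml"></a></sub> <sub>onnxruntime</sub> <br>
      </td>
    </tr>
  </table>
</div>
```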


@@ -66,18 +66,188 @@ MMDeploy 是 [OpenMMLab](https://openmmlab.com/) 模型部署工具箱,**为
The supported device platforms and inference engines are listed in the table below. For benchmark results, see [here](docs/zh_cn/03-benchmark/benchmark.md).
| Device / Platform | Linux | Windows | macOS | Android |
| ----------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| x86_64 CPU | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ort.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ort.yml)ONNXRuntime<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-pplnn.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-pplnn.yml)pplnn<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ncnn.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ncnn.yml)ncnn<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-torchscript.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-torchscript.yml)LibTorch<br>![](https://img.shields.io/badge/build-no%20status-lightgrey)OpenVINO<br>![](https://img.shields.io/badge/build-no%20status-lightgrey)TVM<br> | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/build.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ONNXRuntime<br>![](https://img.shields.io/badge/build-no%20status-lightgrey)OpenVINO<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/build.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ncnn<br> | - | - |
| ARM CPU | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/build.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ncnn<br> | - | - | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-rknn.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-rknn.yml)ncnn<br> |
| RISC-V | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/linux-riscv64-gcc.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/linux-riscv64-gcc.yml)ncnn<br> | - | - | - |
| NVIDIA GPU | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/build.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ONNXRuntime<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/build.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)TensorRT<br>![](https://img.shields.io/badge/build-no%20status-lightgrey)LibTorch<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-pplnn.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-pplnn.yml)pplnn<br> | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/build.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ONNXRuntime<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/build.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)TensorRT<br> | - | - |
| NVIDIA Jetson | ![](https://img.shields.io/badge/build-no%20status-lightgrey)TensorRT<br> | - | - | - |
| Huawei ascend310 | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ascend.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ascend.yml)CANN<br> | - | - | - |
| Rockchip | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-rknn.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-rknn.yml)RKNN<br> | - | - | - |
| Apple M1 | - | - | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-coreml.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-coreml.yml)CoreML<br> | - |
| Adreno GPU | - | - | - | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-snpe.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-snpe.yml)SNPE<br>[![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-rknn.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-rknn.yml)ncnn<br> |
| Hexagon DSP | - | - | - | [![](https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-snpe.yml)](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-snpe.yml)SNPE<br> |
<div style="width: fit-content; margin: auto;">
<table>
<tr>
<th>Device / <br> Platform</th>
<th>Linux</th>
<th>Windows</th>
<th>macOS</th>
<th>Android</th>
</tr>
<tr>
<th>x86_64 <br> CPU</th>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ort.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ort.yml"></a></sub> <sub>onnxruntime</sub> <br>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-pplnn.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-pplnn.yml"></a></sub> <sub>pplnn</sub> <br>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ncnn.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ncnn.yml"></a></sub> <sub>ncnn</sub> <br>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-torchscript.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-torchscript.yml"></a></sub> <sub>LibTorch</sub> <br>
<sub><img src="https://img.shields.io/badge/build-no%20status-lightgrey"></sub> <sub>OpenVINO</sub> <br>
<sub><img src="https://img.shields.io/badge/build-no%20status-lightgrey"></sub> <sub>TVM</sub> <br>
</td>
<td>
<sub><img src="https://img.shields.io/badge/build-no%20status-lightgrey"></sub> <sub>onnxruntime</sub> <br>
<sub><img src="https://img.shields.io/badge/build-no%20status-lightgrey"></sub> <sub>OpenVINO</sub> <br>
<sub><img src="https://img.shields.io/badge/build-no%20status-lightgrey"></sub> <sub>ncnn</sub> <br>
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
</tr>
<tr>
<th>ARM <br> CPU</th>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml"><img src="https://byob.yarr.is/open-mmlab/mmdeploy/cross_build_aarch64"></a></sub> <sub>ncnn</sub> <br>
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td align="center">
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ncnn.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ncnn.yml"></a></sub> <sub>ncnn</sub> <br>
</td>
</tr>
<tr>
<th>RISC-V</th>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/linux-riscv64-gcc.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/linux-riscv64-gcc.yml"></a></sub> <sub>ncnn</sub> <br>
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
</tr>
<tr>
<th>NVIDIA <br> GPU</th>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml"><img src="https://byob.yarr.is/open-mmlab/mmdeploy/build_cuda113_linux"></a></sub> <sub>onnxruntime</sub> <br>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml"><img src="https://byob.yarr.is/open-mmlab/mmdeploy/build_cuda113_linux"></a></sub> <sub>TensorRT</sub> <br>
<sub><img src="https://img.shields.io/badge/build-no%20status-lightgrey"></sub> <sub>LibTorch</sub> <br>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-pplnn.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-pplnn.yml"></a></sub> <sub>pplnn</sub> <br>
</td>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml"><img src="https://byob.yarr.is/open-mmlab/mmdeploy/build_cuda113_windows"></a></sub> <sub>onnxruntime</sub> <br>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml"><img src="https://byob.yarr.is/open-mmlab/mmdeploy/build_cuda113_windows"></a></sub> <sub>TensorRT</sub> <br>
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
</tr>
<tr>
<th>NVIDIA <br> Jetson</th>
<td>
<sub><img src="https://img.shields.io/badge/build-no%20status-lightgrey"></sub> <sub>TensorRT</sub> <br>
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
</tr>
<tr>
<th>Huawei <br> ascend310</th>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ascend.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ascend.yml"></a></sub> <sub>CANN</sub> <br>
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
</tr>
<tr>
<th>Rockchip</th>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-rknn.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-rknn.yml"></a></sub> <sub>RKNN</sub> <br>
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
</tr>
<tr>
<th>Apple M1</th>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-coreml.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-coreml.yml"></a></sub> <sub>CoreML</sub> <br>
</td>
<td align="center">
-
</td>
</tr>
<tr>
<th>Adreno <br> GPU</th>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-snpe.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-snpe.yml"></a></sub> <sub>SNPE</sub> <br>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ncnn.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-ncnn.yml"></a></sub> <sub>ncnn</sub> <br>
</td>
</tr>
<tr>
<th>Hexagon <br> DSP</th>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td align="center">
-
</td>
<td>
<sub><a href="https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-snpe.yml"><img src="https://img.shields.io/github/actions/workflow/status/open-mmlab/mmdeploy/backend-snpe.yml"></a></sub> <sub>SNPE</sub> <br>
</td>
</tr>
</table>
</div>
### Highly customizable SDK