[Refactor] Rename mmdeploy_python to mmdeploy_runtime (#1911)
* [Feature]: Add github prebuild workflow after new release. (#1852)
  * add prebuild dockerfile
  * add prebuild test workflow
  * update
  * update
  * rm other workflow for test
  * Update docker image
  * add win10 prebuild
  * add test prebuild
  * add windows scripts in prebuilt package
  * add linux scripts in prebuilt package
  * generate_build_config.py
  * fix cudnn search
  * fix env
  * fix script
  * fix rpath
  * fix cwd
  * fix windows
  * fix lint
  * windows prebuild ci
  * linux prebuild ci
  * fix
  * update trigger
  * Revert "rm other workflow for test". This reverts commit 0a03872750.
  * update sdk build readme
  * update prebuild
  * fix dll deps for python >= 3.8 on windows
  * fix ci
  * test prebuild
  * update test script to avoid modifying the upload folder
  * add onnxruntime.dll to mmdeploy_python
  * update prebuild workflow
  * update prebuild
  * Update loader.cpp.in
  * remove existing prebuild files
  * fix opencv env
  * update cmake options for mmdeploy python build
  * remove test code
  * fix lint

  Co-authored-by: RunningLeon <mnsheng@yeah.net>
  Co-authored-by: RunningLeon <maningsheng@sensetime.com>
* rename mmdeploy_python -> mmdeploy_runtime
* test master prebuild
* fix trt net build
* Revert "test master prebuild". This reverts commit aad5258648.
* add master branch
* fix linux set_env script
* update package_tools docs
* fix gcc 7.3 aligned_alloc
* comment out temporarily, as text_det_recog can't be built with the prebuilt package built under manylinux

Co-authored-by: RunningLeon <mnsheng@yeah.net>
Co-authored-by: RunningLeon <maningsheng@sensetime.com>
parent c7003bb76a
commit aae9f32623
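Since this PR only renames the SDK Python module, downstream code needs a one-line import change. Below is a minimal sketch of the before/after usage; it assumes the `Detector` constructor used by MMDeploy's demo scripts, and the model path is a placeholder.

```python
import cv2

# before this PR:
#   from mmdeploy_python import Detector
# after this PR (same bindings, new module name):
from mmdeploy_runtime import Detector

img = cv2.imread('mmdetection/demo/demo.jpg')
detector = Detector(
    model_path='mmdeploy_models/faster-rcnn',  # placeholder: a converted SDK model dir
    device_name='cpu',
    device_id=0)
bboxes, labels, masks = detector(img)
print(bboxes.shape, labels.shape)
```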
|
@ -0,0 +1,234 @@
|
|||
name: prebuild
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- dev-1.x
|
||||
- master
|
||||
paths:
|
||||
- "mmdeploy/version.py"
|
||||
|
||||
permissions: write-all
|
||||
|
||||
jobs:
|
||||
linux_build:
|
||||
runs-on: [self-hosted, linux-3090]
|
||||
container:
|
||||
image: openmmlab/mmdeploy:manylinux2014_x86_64-cuda11.3
|
||||
options: "--gpus=all --ipc=host"
|
||||
volumes:
|
||||
- /data2/actions-runner/prebuild:/__w/mmdeploy/prebuild
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Get mmdeploy version
|
||||
run: |
|
||||
export MMDEPLOY_VERSION=$(python3 -c "import sys; sys.path.append('mmdeploy');from version import __version__;print(__version__)")
|
||||
echo $MMDEPLOY_VERSION
|
||||
echo "MMDEPLOY_VERSION=$MMDEPLOY_VERSION" >> $GITHUB_ENV
|
||||
- name: Build MMDeploy
|
||||
run: |
|
||||
source activate mmdeploy-3.6
|
||||
pip install pyyaml packaging setuptools wheel
|
||||
mkdir pack; cd pack
|
||||
python ../tools/package_tools/generate_build_config.py --backend 'trt;ort' \
|
||||
--system linux --output config.yml --build-mmdeploy
|
||||
python ../tools/package_tools/mmdeploy_builder.py --config config.yml
|
||||
- name: Build sdk cpu backend
|
||||
run: |
|
||||
source activate mmdeploy-3.6
|
||||
cd pack
|
||||
python ../tools/package_tools/generate_build_config.py --backend 'ort' \
|
||||
--system linux --output config.yml --device cpu --build-sdk --build-sdk-monolithic \
|
||||
--build-sdk-python --sdk-dynamic-net
|
||||
python ../tools/package_tools/mmdeploy_builder.py --config config.yml
|
||||
- name: Build sdk cuda backend
|
||||
run: |
|
||||
source activate mmdeploy-3.6
|
||||
cd pack
|
||||
python ../tools/package_tools/generate_build_config.py --backend 'ort;trt' \
|
||||
--system linux --output config.yml --device cuda --build-sdk --build-sdk-monolithic \
|
||||
--build-sdk-python --sdk-dynamic-net
|
||||
python ../tools/package_tools/mmdeploy_builder.py --config config.yml
|
||||
- name: Move artifact
|
||||
run: |
|
||||
mkdir -p /__w/mmdeploy/prebuild/$MMDEPLOY_VERSION
|
||||
rm -rf /__w/mmdeploy/prebuild/$MMDEPLOY_VERSION/*
|
||||
mv pack/* /__w/mmdeploy/prebuild/$MMDEPLOY_VERSION
|
||||
|
||||
linux_test:
|
||||
runs-on: [self-hosted, linux-3090]
|
||||
needs: linux_build
|
||||
container:
|
||||
image: openmmlab/mmdeploy:ubuntu20.04-cuda11.3
|
||||
options: "--gpus=all --ipc=host"
|
||||
volumes:
|
||||
- /data2/actions-runner/prebuild:/__w/mmdeploy/prebuild
|
||||
- /data2/actions-runner/testmodel:/__w/mmdeploy/testmodel
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Get mmdeploy version
|
||||
run: |
|
||||
export MMDEPLOY_VERSION=$(python3 -c "import sys; sys.path.append('mmdeploy');from version import __version__;print(__version__)")
|
||||
echo $MMDEPLOY_VERSION
|
||||
echo "MMDEPLOY_VERSION=$MMDEPLOY_VERSION" >> $GITHUB_ENV
|
||||
- name: Test python
|
||||
run: |
|
||||
cd /__w/mmdeploy/prebuild/$MMDEPLOY_VERSION
|
||||
bash $GITHUB_WORKSPACE/tools/package_tools/test/test_sdk_python.sh
|
||||
- name: Test c/cpp
|
||||
run: |
|
||||
cd /__w/mmdeploy/prebuild/$MMDEPLOY_VERSION
|
||||
bash $GITHUB_WORKSPACE/tools/package_tools/test/test_sdk.sh
|
||||
|
||||
linux_upload:
|
||||
runs-on: [self-hosted, linux-3090]
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
environment: 'prod'
|
||||
needs: linux_test
|
||||
env:
|
||||
PREBUILD_DIR: /data2/actions-runner/prebuild
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Get mmdeploy version
|
||||
run: |
|
||||
export MMDEPLOY_VERSION=$(python3 -c "import sys; sys.path.append('mmdeploy');from version import __version__;print(__version__)")
|
||||
echo $MMDEPLOY_VERSION
|
||||
echo "MMDEPLOY_VERSION=$MMDEPLOY_VERSION" >> $GITHUB_ENV
|
||||
- name: Upload mmdeploy
|
||||
run: |
|
||||
cd $PREBUILD_DIR/$MMDEPLOY_VERSION/mmdeploy
|
||||
pip install twine
|
||||
# twine upload * --repository testpypi -u __token__ -p ${{ secrets.test_pypi_password }}
|
||||
twine upload * -u __token__ -p ${{ secrets.pypi_password }}
|
||||
- name: Upload mmdeploy_runtime
|
||||
run: |
|
||||
cd $PREBUILD_DIR/$MMDEPLOY_VERSION/mmdeploy_runtime
|
||||
# twine upload * --repository testpypi -u __token__ -p ${{ secrets.test_pypi_password }}
|
||||
twine upload * -u __token__ -p ${{ secrets.pypi_password }}
|
||||
- name: Zip mmdeploy sdk
|
||||
run: |
|
||||
cd $PREBUILD_DIR/$MMDEPLOY_VERSION/sdk
|
||||
for folder in *
|
||||
do
|
||||
tar czf $folder.tar.gz $folder
|
||||
done
|
||||
- name: Upload mmdeploy sdk
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
files: |
|
||||
$PREBUILD_DIR/$MMDEPLOY_VERSION/sdk/*.tar.gz
|
||||
|
||||
|
||||
windows_build:
|
||||
runs-on: [self-hosted, win10-3080]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Get mmdeploy version
|
||||
run: |
|
||||
conda activate mmdeploy-3.8
|
||||
$env:MMDEPLOY_VERSION=(python -c "import sys; sys.path.append('mmdeploy');from version import __version__;print(__version__)")
|
||||
echo $env:MMDEPLOY_VERSION
|
||||
echo "MMDEPLOY_VERSION=$env:MMDEPLOY_VERSION" >> $env:GITHUB_ENV
|
||||
- name: Build MMDeploy
|
||||
run: |
|
||||
. D:\DEPS\cienv\prebuild_gpu_env.ps1
|
||||
conda activate mmdeploy-3.6
|
||||
mkdir pack; cd pack
|
||||
python ../tools/package_tools/generate_build_config.py --backend 'trt;ort' `
|
||||
--system windows --output config.yml --build-mmdeploy
|
||||
python ../tools/package_tools/mmdeploy_builder.py --config config.yml
|
||||
- name: Build sdk cpu backend
|
||||
run: |
|
||||
. D:\DEPS\cienv\prebuild_cpu_env.ps1
|
||||
conda activate mmdeploy-3.6
|
||||
cd pack
|
||||
python ../tools/package_tools/generate_build_config.py --backend 'ort' `
|
||||
--system windows --output config.yml --device cpu --build-sdk --build-sdk-monolithic `
|
||||
--build-sdk-python --sdk-dynamic-net
|
||||
python ../tools/package_tools/mmdeploy_builder.py --config config.yml
|
||||
- name: Build sdk cuda backend
|
||||
run: |
|
||||
. D:\DEPS\cienv\prebuild_gpu_env.ps1
|
||||
conda activate mmdeploy-3.6
|
||||
cd pack
|
||||
python ../tools/package_tools/generate_build_config.py --backend 'ort;trt' `
|
||||
--system windows --output config.yml --device cuda --build-sdk --build-sdk-monolithic `
|
||||
--build-sdk-python --sdk-dynamic-net
|
||||
python ../tools/package_tools/mmdeploy_builder.py --config config.yml
|
||||
- name: Move artifact
|
||||
run: |
|
||||
New-Item "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION" -ItemType Directory -Force
|
||||
Remove-Item "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION/*" -Force -Recurse
|
||||
Move-Item pack/* "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION"
|
||||
|
||||
windows_test:
|
||||
runs-on: [self-hosted, win10-3080]
|
||||
needs: windows_build
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Get mmdeploy version
|
||||
run: |
|
||||
conda activate mmdeploy-3.8
|
||||
$env:MMDEPLOY_VERSION=(python -c "import sys; sys.path.append('mmdeploy');from version import __version__;print(__version__)")
|
||||
echo $env:MMDEPLOY_VERSION
|
||||
echo "MMDEPLOY_VERSION=$env:MMDEPLOY_VERSION" >> $env:GITHUB_ENV
|
||||
- name: Test python
|
||||
run: |
|
||||
cd "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION"
|
||||
. D:\DEPS\cienv\prebuild_cpu_env.ps1
|
||||
conda activate ci-test
|
||||
& "$env:GITHUB_WORKSPACE/tools/package_tools/test/test_sdk_python.ps1"
|
||||
- name: Test c/cpp
|
||||
run: |
|
||||
cd "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION"
|
||||
. D:\DEPS\cienv\prebuild_cpu_env.ps1
|
||||
& "$env:GITHUB_WORKSPACE/tools/package_tools/test/test_sdk.ps1"
|
||||
|
||||
windows_upload:
|
||||
runs-on: [self-hosted, win10-3080]
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
environment: 'prod'
|
||||
needs: windows_test
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Get mmdeploy version
|
||||
run: |
|
||||
conda activate mmdeploy-3.8
|
||||
$env:MMDEPLOY_VERSION=(python -c "import sys; sys.path.append('mmdeploy');from version import __version__;print(__version__)")
|
||||
echo $env:MMDEPLOY_VERSION
|
||||
echo "MMDEPLOY_VERSION=$env:MMDEPLOY_VERSION" >> $env:GITHUB_ENV
|
||||
- name: Upload mmdeploy
|
||||
run: |
|
||||
cd "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION/mmdeploy"
|
||||
conda activate mmdeploy-3.8
|
||||
# twine upload * --repository testpypi -u __token__ -p ${{ secrets.test_pypi_password }}
|
||||
twine upload * -u __token__ -p ${{ secrets.pypi_password }}
|
||||
- name: Upload mmdeploy_runtime
|
||||
run: |
|
||||
cd "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION/mmdeploy_runtime"
|
||||
conda activate mmdeploy-3.8
|
||||
# twine upload * --repository testpypi -u __token__ -p ${{ secrets.test_pypi_password }}
|
||||
twine upload * -u __token__ -p ${{ secrets.pypi_password }}
|
||||
- name: Zip mmdeploy sdk
|
||||
run: |
|
||||
cd "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION/sdk"
|
||||
$folders = $(ls).Name
|
||||
foreach ($folder in $folders) {
|
||||
Compress-Archive -Path $folder -DestinationPath "$folder.zip"
|
||||
}
|
||||
- name: Upload mmdeploy sdk
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
files: |
|
||||
D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION/sdk/*.zip
|
|
@ -3,7 +3,9 @@ repos:
|
|||
rev: 4.0.1
|
||||
hooks:
|
||||
- id: flake8
|
||||
args: ["--exclude=*/client/inference_pb2.py,*/client/inference_pb2_grpc.py"]
|
||||
args: ["--exclude=*/client/inference_pb2.py, \
|
||||
*/client/inference_pb2_grpc.py, \
|
||||
tools/package_tools/packaging/setup.py"]
|
||||
- repo: https://github.com/PyCQA/isort
|
||||
rev: 5.11.5
|
||||
hooks:
|
||||
|
|
|
@ -3,6 +3,7 @@ include mmdeploy/backend/ncnn/*.so
|
|||
include mmdeploy/backend/ncnn/*.dll
|
||||
include mmdeploy/backend/ncnn/*.pyd
|
||||
include mmdeploy/lib/*.so
|
||||
include mmdeploy/lib/*.so*
|
||||
include mmdeploy/lib/*.dll
|
||||
include mmdeploy/lib/*.pyd
|
||||
include mmdeploy/backend/torchscript/*.so
|
||||
|
|
|
@ -23,11 +23,27 @@
|
|||
namespace mmdeploy {
|
||||
namespace {
|
||||
|
||||
#ifdef _WIN32
|
||||
inline static const std::wstring GetDllPath() {
|
||||
HMODULE hm = NULL;
|
||||
GetModuleHandleExW(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT,
|
||||
(LPWSTR)&GetDllPath, &hm);
|
||||
std::wstring ret;
|
||||
ret.resize(MAX_PATH);
|
||||
GetModuleFileNameW(hm, &ret[0], ret.size());
|
||||
ret = ret.substr(0, ret.find_last_of(L"/\\"));
|
||||
return ret;
|
||||
}
|
||||
#endif
|
||||
|
||||
void* mmdeploy_load_library(const char* name) {
|
||||
fprintf(stderr, "loading %s ...\n", name);
|
||||
|
||||
#ifdef _WIN32
|
||||
auto handle = LoadLibraryExA(name, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
|
||||
auto handle = LoadLibraryExA(name, NULL, LOAD_LIBRARY_SEARCH_USER_DIRS);
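// LOAD_LIBRARY_SEARCH_USER_DIRS resolves dependencies from directories registered via AddDllDirectory (see Loader() below); if that fails, fall back to the legacy altered search path.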
|
||||
if (handle == NULL) {
|
||||
handle = LoadLibraryExA(name, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
|
||||
}
|
||||
#else
|
||||
auto handle = dlopen(name, RTLD_NOW | RTLD_GLOBAL);
|
||||
#endif
|
||||
|
@ -44,6 +60,9 @@ void* mmdeploy_load_library(const char* name) {
|
|||
class Loader {
|
||||
public:
|
||||
Loader() {
|
||||
#ifdef _WIN32
|
||||
AddDllDirectory(GetDllPath().c_str());
|
||||
#endif
|
||||
const char* modules[] = {
|
||||
@_MMDEPLOY_DYNAMIC_MODULES@
|
||||
};
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
# Copyright (c) OpenMMLab. All rights reserved.
|
||||
|
||||
cmake_minimum_required(VERSION 3.14)
|
||||
project(mmdeploy_python)
|
||||
project(mmdeploy_runtime)
|
||||
|
||||
set(MMDEPLOY_PYTHON_SRCS
|
||||
set(MMDEPLOY_RUNTIME_SRCS
|
||||
common.cpp
|
||||
internal.cpp
|
||||
pipeline.cpp)
|
||||
|
@ -20,12 +20,21 @@ elseif (NOT TARGET pybind11)
|
|||
endif ()
|
||||
|
||||
foreach (task_name ${MMDEPLOY_TASKS})
|
||||
list(APPEND MMDEPLOY_PYTHON_SRCS ${task_name}.cpp)
|
||||
list(APPEND MMDEPLOY_RUNTIME_SRCS ${task_name}.cpp)
|
||||
endforeach ()
|
||||
|
||||
pybind11_add_module(${PROJECT_NAME} ${MMDEPLOY_PYTHON_SRCS})
|
||||
pybind11_add_module(${PROJECT_NAME} ${MMDEPLOY_RUNTIME_SRCS})
|
||||
# disable MMDEPLOY_CXX_USE_OPENCV in apis/cxx/mmdeploy/common.hpp
|
||||
target_compile_definitions(${PROJECT_NAME} PRIVATE -DMMDEPLOY_CXX_USE_OPENCV=0)
|
||||
if (APPLE)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES
|
||||
BUILD_RPATH "@loader_path"
|
||||
INSTALL_RPATH "@loader_path")
|
||||
else ()
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES
|
||||
BUILD_RPATH "\$ORIGIN"
|
||||
INSTALL_RPATH "\$ORIGIN")
|
||||
endif ()
|
||||
|
||||
# https://github.com/pybind/pybind11/issues/1604
|
||||
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
|
||||
|
|
|
@ -165,7 +165,7 @@ static PythonBindingRegisterer register_scheduler{[](py::module& m) {
|
|||
|
||||
} // namespace mmdeploy::python
|
||||
|
||||
PYBIND11_MODULE(mmdeploy_python, m) {
|
||||
PYBIND11_MODULE(mmdeploy_runtime, m) {
|
||||
for (const auto& f : mmdeploy::python::gPythonBindings()) {
|
||||
f(m);
|
||||
}
|
||||
|
|
|
@ -22,6 +22,15 @@ target_link_libraries(${PROJECT_NAME}_obj PUBLIC onnxruntime)
|
|||
|
||||
mmdeploy_add_library(${PROJECT_NAME} SHARED EXCLUDE "")
|
||||
target_link_libraries(${PROJECT_NAME} PUBLIC ${PROJECT_NAME}_obj)
|
||||
if (APPLE)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES
|
||||
BUILD_RPATH "@loader_path"
|
||||
INSTALL_RPATH "@loader_path")
|
||||
else ()
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES
|
||||
BUILD_RPATH "\$ORIGIN"
|
||||
INSTALL_RPATH "\$ORIGIN")
|
||||
endif ()
|
||||
add_library(mmdeploy::onnxruntime::ops ALIAS ${PROJECT_NAME})
|
||||
|
||||
set(_ORT_OPS_DIR ${CMAKE_SOURCE_DIR}/mmdeploy/lib)
|
||||
|
|
|
@ -18,7 +18,7 @@ class CpuHostMemory : public NonCopyable {
|
|||
#elif defined(ANDROID)
|
||||
posix_memalign(&data_, alignment, space);
|
||||
#else
|
||||
data_ = std::aligned_alloc(alignment, space);
|
||||
data_ = aligned_alloc(alignment, space);
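// plain C aligned_alloc instead of std::aligned_alloc; the commit message cites a build failure with GCC 7.3 ("fix gcc 7.3 aligned_alloc")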
|
||||
#endif
|
||||
if (!data_) {
|
||||
return Status(eOutOfMemory);
|
||||
|
|
|
@ -10,6 +10,6 @@ target_include_directories(${PROJECT_NAME} PRIVATE
|
|||
target_include_directories(${PROJECT_NAME} PRIVATE ${CUDNN_DIR}/include)
|
||||
target_include_directories(${PROJECT_NAME} PRIVATE ${CUDA_TOOLKIT_ROOT_DIR}/include)
|
||||
target_link_libraries(${PROJECT_NAME} PRIVATE mmdeploy_tensorrt_ops_obj)
|
||||
target_link_libraries(${PROJECT_NAME} PUBLIC ${TENSORRT_LIBS} cudnn)
|
||||
target_link_libraries(${PROJECT_NAME} PUBLIC ${TENSORRT_LIBS} cudnn cuda)
|
||||
|
||||
add_library(mmdeploy::trt_net ALIAS ${PROJECT_NAME})
|
||||
|
|
|
@ -76,7 +76,7 @@ int main(int argc, char* argv[]) {
|
|||
|
||||
auto output = pipeline.Apply(mat);
|
||||
|
||||
MMDEPLOY_INFO("output:\n{}", output);
|
||||
// MMDEPLOY_INFO("output:\n{}", output);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@ import argparse
|
|||
|
||||
import cv2
|
||||
import numpy as np
|
||||
from mmdeploy_python import Detector, PoseDetector
|
||||
from mmdeploy_runtime import Detector, PoseDetector
|
||||
|
||||
|
||||
def parse_args():
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
import argparse
|
||||
|
||||
import cv2
|
||||
from mmdeploy_python import Classifier
|
||||
from mmdeploy_runtime import Classifier
|
||||
|
||||
|
||||
def parse_args():
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
import argparse
|
||||
|
||||
import cv2
|
||||
from mmdeploy_python import Restorer
|
||||
from mmdeploy_runtime import Restorer
|
||||
|
||||
|
||||
def parse_args():
|
||||
|
|
|
@ -3,7 +3,7 @@ import argparse
|
|||
|
||||
import cv2
|
||||
import numpy as np
|
||||
from mmdeploy_python import Segmentor
|
||||
from mmdeploy_runtime import Segmentor
|
||||
|
||||
|
||||
def parse_args():
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
import argparse
|
||||
|
||||
import cv2
|
||||
from mmdeploy_python import Detector
|
||||
from mmdeploy_runtime import Detector
|
||||
|
||||
|
||||
def parse_args():
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
import argparse
|
||||
|
||||
import cv2
|
||||
from mmdeploy_python import TextDetector, TextRecognizer
|
||||
from mmdeploy_runtime import TextDetector, TextRecognizer
|
||||
|
||||
|
||||
def parse_args():
|
||||
|
|
|
@ -3,7 +3,7 @@ import argparse
|
|||
import json
|
||||
|
||||
import cv2
|
||||
from mmdeploy_python import Context, Device, Model, Pipeline
|
||||
from mmdeploy_runtime import Context, Device, Model, Pipeline
|
||||
|
||||
|
||||
def parse_args():
|
||||
|
|
|
@ -3,7 +3,7 @@ import argparse
|
|||
|
||||
import cv2
|
||||
import numpy as np
|
||||
from mmdeploy_python import PoseDetector
|
||||
from mmdeploy_runtime import PoseDetector
|
||||
|
||||
|
||||
def parse_args():
|
||||
|
|
|
@ -3,7 +3,7 @@ import argparse
|
|||
import os
|
||||
|
||||
import cv2
|
||||
from mmdeploy_python import PoseTracker
|
||||
from mmdeploy_runtime import PoseTracker
|
||||
|
||||
|
||||
def parse_args():
|
||||
|
|
|
@ -4,7 +4,7 @@ from math import cos, sin
|
|||
|
||||
import cv2
|
||||
import numpy as np
|
||||
from mmdeploy_python import RotatedDetector
|
||||
from mmdeploy_runtime import RotatedDetector
|
||||
|
||||
|
||||
def parse_args():
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
import argparse
|
||||
|
||||
import cv2
|
||||
from mmdeploy_python import VideoRecognizer
|
||||
from mmdeploy_runtime import VideoRecognizer
|
||||
|
||||
|
||||
def parse_args():
|
||||
|
|
|
@ -0,0 +1,104 @@
|
|||
FROM quay.io/pypa/manylinux2014_x86_64
|
||||
|
||||
# package urls
|
||||
ARG CUDA_URL
|
||||
ARG CUDNN_URL
|
||||
ARG TENSORRT_URL
|
||||
|
||||
ARG CUDA_VERSION=11.3
|
||||
|
||||
# important dependencies
|
||||
ARG OPENCV_VERSION=4.5.5
|
||||
ARG PPLCV_VERSION=0.7.0
|
||||
|
||||
# backends
|
||||
ARG ONNXRUNTIME_VERSION=1.8.1
|
||||
ARG TENSORRT_VERSION=8.2.3.0
|
||||
|
||||
# torch
|
||||
ARG TORCH_VERSION=1.10.0
|
||||
ARG TORCHVISION_VERSION=0.11.0
|
||||
|
||||
USER root
|
||||
WORKDIR /root/workspace
|
||||
|
||||
ENV FORCE_CUDA="1"
|
||||
|
||||
# install cuda cudnn
|
||||
RUN curl -fsSL -v -o ./cuda_install.run -O $CUDA_URL &&\
|
||||
chmod +x ./cuda_install.run &&\
|
||||
./cuda_install.run --silent --toolkit &&\
|
||||
rm -f ./cuda_install.run &&\
|
||||
curl -fsSL -v -o ./cudnn.tgz -O $CUDNN_URL &&\
|
||||
tar -xzvf ./cudnn.tgz &&\
|
||||
rm -f ./cudnn.tgz &&\
|
||||
mv cu* /opt/cudnn
|
||||
|
||||
# install ort, trt
|
||||
RUN curl -fsSL -v -o ./onnxruntime.tgz -O https://github.com/microsoft/onnxruntime/releases/download/v${ONNXRUNTIME_VERSION}/onnxruntime-linux-x64-${ONNXRUNTIME_VERSION}.tgz &&\
|
||||
tar -xzvf onnxruntime.tgz &&\
|
||||
rm onnxruntime.tgz &&\
|
||||
mv onnxruntime* /opt/onnxruntime &&\
|
||||
curl -fsSL -v -o ./tensorrt.tgz -O $TENSORRT_URL &&\
|
||||
tar -xzvf ./tensorrt.tgz &&\
|
||||
rm -f ./tensorrt.tgz &&\
|
||||
mv ./TensorRT* /opt/TensorRT &&\
|
||||
cd /opt/TensorRT &&\
|
||||
rm -rf data doc samples uff
|
||||
|
||||
ENV CUDA_TOOLKIT_ROOT_DIR=/usr/local/cuda
|
||||
ENV CUDNN_DIR=/opt/cudnn
|
||||
ENV ONNXRUNTIME_DIR=/opt/onnxruntime
|
||||
ENV TENSORRT_DIR=/opt/TensorRT
|
||||
|
||||
ENV LD_LIBRARY_PATH=$CUDA_TOOLKIT_ROOT_DIR/lib64:$CUDNN_DIR/lib64:$LD_LIBRARY_PATH
|
||||
ENV LD_LIBRARY_PATH=${ONNXRUNTIME_DIR}/lib:$TENSORRT_DIR/lib:$LD_LIBRARY_PATH
|
||||
ENV PATH=$TENSORRT_DIR/bin:$PATH
|
||||
|
||||
### install ppl.cv
|
||||
RUN git clone --depth 1 --branch v${PPLCV_VERSION} https://github.com/openppl-public/ppl.cv.git &&\
|
||||
cd ppl.cv &&\
|
||||
./build.sh cuda &&\
|
||||
mv cuda-build/install ./ &&\
|
||||
rm -rf cuda-build
|
||||
|
||||
ENV pplcv_DIR=/root/workspace/ppl.cv/install/lib/cmake/ppl
|
||||
|
||||
# build opencv as static lib
|
||||
RUN curl -fsSL -v -o ./opencv.tgz -O https://github.com/opencv/opencv/archive/refs/tags/${OPENCV_VERSION}.tar.gz &&\
|
||||
tar -xzvf ./opencv.tgz &&\
|
||||
rm -f ./opencv.tgz &&\
|
||||
cd opencv-${OPENCV_VERSION} &&\
|
||||
mkdir build && cd build &&\
|
||||
cmake .. \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_INSTALL_PREFIX=/usr/local \
|
||||
-DOPENCV_FORCE_3RDPARTY_BUILD=ON \
|
||||
-DBUILD_TESTS=OFF \
|
||||
-DBUILD_PERF_TESTS=OFF \
|
||||
-DBUILD_SHARED_LIBS=OFF &&\
|
||||
make -j$(nproc) && make install
|
||||
|
||||
ENV OpenCV_DIR=/usr/local/lib64/cmake/opencv4
|
||||
|
||||
# install conda env
|
||||
RUN curl -fsSL -v -o ~/miniconda.sh -O https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh &&\
|
||||
chmod +x ~/miniconda.sh &&\
|
||||
bash ~/miniconda.sh -b -p /opt/conda &&\
|
||||
rm ~/miniconda.sh &&\
|
||||
/opt/conda/bin/conda create -n mmdeploy-3.6 python=3.6 -y &&\
|
||||
/opt/conda/bin/conda create -n mmdeploy-3.7 python=3.7 -y &&\
|
||||
/opt/conda/bin/conda create -n mmdeploy-3.8 python=3.8 -y &&\
|
||||
/opt/conda/bin/conda create -n mmdeploy-3.9 python=3.9 -y &&\
|
||||
/opt/conda/bin/conda create -n mmdeploy-3.10 python=3.10 -y &&\
|
||||
export CUDA_INT=$(echo $CUDA_VERSION | awk '{split($0, a, "."); print a[1]a[2]}') &&\
|
||||
/opt/conda/bin/conda create -n torch${TORCH_VERSION} python=3.8 -y &&\
|
||||
/opt/conda/envs/mmdeploy-3.6/bin/pip install --no-cache-dir setuptools wheel pyyaml &&\
|
||||
/opt/conda/envs/torch${TORCH_VERSION}/bin/pip install --no-cache-dir onnxruntime-gpu==${ONNXRUNTIME_VERSION} &&\
|
||||
/opt/conda/envs/torch${TORCH_VERSION}/bin/pip install ${TENSORRT_DIR}/python/tensorrt-*-cp38-none-linux_x86_64.whl &&\
|
||||
/opt/conda/envs/torch${TORCH_VERSION}/bin/pip install --no-cache-dir torch==${TORCH_VERSION}+cu${CUDA_INT} \
|
||||
torchvision==${TORCHVISION_VERSION}+cu${CUDA_INT} -f https://download.pytorch.org/whl/torch_stable.html &&\
|
||||
/opt/conda/bin/conda clean -ya
|
||||
|
||||
ENV CONDA=/opt/conda
|
||||
ENV PATH=$CONDA/bin:$PATH
|
|
@ -77,12 +77,12 @@ After the above work is done, the structure of the current working directory sho
|
|||
|
||||
In order to use `ONNX Runtime` backend, you should also do the following steps.
|
||||
|
||||
5. Install `mmdeploy` (Model Converter) and `mmdeploy_python` (SDK Python API).
|
||||
5. Install `mmdeploy` (Model Converter) and `mmdeploy_runtime` (SDK Python API).
|
||||
|
||||
```bash
|
||||
# download mmdeploy-0.13.0-windows-amd64-onnxruntime1.8.1.zip
|
||||
pip install .\mmdeploy-0.13.0-windows-amd64-onnxruntime1.8.1\dist\mmdeploy-0.13.0-py38-none-win_amd64.whl
|
||||
pip install .\mmdeploy-0.13.0-windows-amd64-onnxruntime1.8.1\sdk\python\mmdeploy_python-0.13.0-cp38-none-win_amd64.whl
|
||||
pip install .\mmdeploy-0.13.0-windows-amd64-onnxruntime1.8.1\sdk\python\mmdeploy_runtime-0.13.0-cp38-none-win_amd64.whl
|
||||
```
|
||||
|
||||
:point_right: If you have installed it before, please uninstall it first.
|
||||
|
@ -104,12 +104,12 @@ In order to use `ONNX Runtime` backend, you should also do the following steps.
|
|||
|
||||
In order to use `TensorRT` backend, you should also do the following steps.
|
||||
|
||||
5. Install `mmdeploy` (Model Converter) and `mmdeploy_python` (SDK Python API).
|
||||
5. Install `mmdeploy` (Model Converter) and `mmdeploy_runtime` (SDK Python API).
|
||||
|
||||
```bash
|
||||
# download mmdeploy-0.13.0-windows-amd64-cuda11.1-tensorrt8.2.3.0.zip
|
||||
pip install .\mmdeploy-0.13.0-windows-amd64-cuda11.1-tensorrt8.2.3.0\dist\mmdeploy-0.13.0-py38-none-win_amd64.whl
|
||||
pip install .\mmdeploy-0.13.0-windows-amd64-cuda11.1-tensorrt8.2.3.0\sdk\python\mmdeploy_python-0.13.0-cp38-none-win_amd64.whl
|
||||
pip install .\mmdeploy-0.13.0-windows-amd64-cuda11.1-tensorrt8.2.3.0\sdk\python\mmdeploy_runtime-0.13.0-cp38-none-win_amd64.whl
|
||||
```
|
||||
|
||||
:point_right: If you have installed it before, please uninstall it first.
|
||||
|
|
|
@ -122,7 +122,7 @@ wget https://github.com/open-mmlab/mmdeploy/releases/download/v0.13.0/mmdeploy-0
|
|||
tar -zxvf mmdeploy-0.13.0-linux-x86_64-onnxruntime1.8.1.tar.gz
|
||||
cd mmdeploy-0.13.0-linux-x86_64-onnxruntime1.8.1
|
||||
pip install dist/mmdeploy-0.13.0-py3-none-linux_x86_64.whl
|
||||
pip install sdk/python/mmdeploy_python-0.13.0-cp38-none-linux_x86_64.whl
|
||||
pip install sdk/python/mmdeploy_runtime-0.13.0-cp38-none-linux_x86_64.whl
|
||||
cd ..
|
||||
# install inference engine: ONNX Runtime
|
||||
pip install onnxruntime==1.8.1
|
||||
|
@ -143,7 +143,7 @@ wget https://github.com/open-mmlab/mmdeploy/releases/download/v0.13.0/mmdeploy-0
|
|||
tar -zxvf mmdeploy-0.13.0-linux-x86_64-cuda11.1-tensorrt8.2.3.0.tar.gz
|
||||
cd mmdeploy-0.13.0-linux-x86_64-cuda11.1-tensorrt8.2.3.0
|
||||
pip install dist/mmdeploy-0.13.0-py3-none-linux_x86_64.whl
|
||||
pip install sdk/python/mmdeploy_python-0.13.0-cp38-none-linux_x86_64.whl
|
||||
pip install sdk/python/mmdeploy_runtime-0.13.0-cp38-none-linux_x86_64.whl
|
||||
cd ..
|
||||
# install inference engine: TensorRT
|
||||
# !!! Download TensorRT-8.2.3.0 CUDA 11.x tar package from NVIDIA, and extract it to the current directory
|
||||
|
@ -249,7 +249,7 @@ In the next section, we will provide examples of deploying the converted Faster
|
|||
#### Python API
|
||||
|
||||
```python
|
||||
from mmdeploy_python import Detector
|
||||
from mmdeploy_runtime import Detector
|
||||
import cv2
|
||||
|
||||
img = cv2.imread('mmdetection/demo/demo.jpg')
|
||||
|
|
|
@ -85,12 +85,12 @@ ______________________________________________________________________
|
|||
|
||||
This section describes the environment setup required specifically for inference with the `ONNX Runtime` backend in `mmdeploy`.
|
||||
|
||||
5. Install the prebuilt packages of `mmdeploy` (model conversion) and `mmdeploy_python` (model inference Python API).
|
||||
5. Install the prebuilt packages of `mmdeploy` (model conversion) and `mmdeploy_runtime` (model inference Python API).
|
||||
|
||||
```bash
|
||||
# download mmdeploy-0.13.0-windows-amd64-onnxruntime1.8.1.zip first
|
||||
pip install .\mmdeploy-0.13.0-windows-amd64-onnxruntime1.8.1\dist\mmdeploy-0.13.0-py38-none-win_amd64.whl
|
||||
pip install .\mmdeploy-0.13.0-windows-amd64-onnxruntime1.8.1\sdk\python\mmdeploy_python-0.13.0-cp38-none-win_amd64.whl
|
||||
pip install .\mmdeploy-0.13.0-windows-amd64-onnxruntime1.8.1\sdk\python\mmdeploy_runtime-0.13.0-cp38-none-win_amd64.whl
|
||||
```
|
||||
|
||||
:point_right: If you have installed it before, please uninstall it first and then reinstall.
|
||||
|
@ -112,12 +112,12 @@ ______________________________________________________________________
|
|||
|
||||
This section describes the environment setup required specifically for inference with the `TensorRT` backend in `mmdeploy`.
|
||||
|
||||
5. Install the prebuilt packages of `mmdeploy` (model conversion) and `mmdeploy_python` (model inference Python API).
|
||||
5. Install the prebuilt packages of `mmdeploy` (model conversion) and `mmdeploy_runtime` (model inference Python API).
|
||||
|
||||
```bash
|
||||
# download mmdeploy-0.13.0-windows-amd64-cuda11.1-tensorrt8.2.3.0.zip first
|
||||
pip install .\mmdeploy-0.13.0-windows-amd64-cuda11.1-tensorrt8.2.3.0\dist\mmdeploy-0.13.0-py38-none-win_amd64.whl
|
||||
pip install .\mmdeploy-0.13.0-windows-amd64-cuda11.1-tensorrt8.2.3.0\sdk\python\mmdeploy_python-0.13.0-cp38-none-win_amd64.whl
|
||||
pip install .\mmdeploy-0.13.0-windows-amd64-cuda11.1-tensorrt8.2.3.0\sdk\python\mmdeploy_runtime-0.13.0-cp38-none-win_amd64.whl
|
||||
```
|
||||
|
||||
:point_right: If you have installed it before, please uninstall it first and then reinstall.
|
||||
|
|
|
@ -117,7 +117,7 @@ wget https://github.com/open-mmlab/mmdeploy/releases/download/v0.13.0/mmdeploy-0
|
|||
tar -zxvf mmdeploy-0.13.0-linux-x86_64-onnxruntime1.8.1.tar.gz
|
||||
cd mmdeploy-0.13.0-linux-x86_64-onnxruntime1.8.1
|
||||
pip install dist/mmdeploy-0.13.0-py3-none-linux_x86_64.whl
|
||||
pip install sdk/python/mmdeploy_python-0.13.0-cp38-none-linux_x86_64.whl
|
||||
pip install sdk/python/mmdeploy_runtime-0.13.0-cp38-none-linux_x86_64.whl
|
||||
cd ..
|
||||
# install the inference engine: ONNX Runtime
|
||||
pip install onnxruntime==1.8.1
|
||||
|
@ -138,7 +138,7 @@ wget https://github.com/open-mmlab/mmdeploy/releases/download/v0.13.0/mmdeploy-0
|
|||
tar -zxvf mmdeploy-0.13.0-linux-x86_64-cuda11.1-tensorrt8.2.3.0.tar.gz
|
||||
cd mmdeploy-0.13.0-linux-x86_64-cuda11.1-tensorrt8.2.3.0
|
||||
pip install dist/mmdeploy-0.13.0-py3-none-linux_x86_64.whl
|
||||
pip install sdk/python/mmdeploy_python-0.13.0-cp38-none-linux_x86_64.whl
|
||||
pip install sdk/python/mmdeploy_runtime-0.13.0-cp38-none-linux_x86_64.whl
|
||||
cd ..
|
||||
# install the inference engine: TensorRT
|
||||
# !!! Download the TensorRT-8.2.3.0 CUDA 11.x package from the NVIDIA website and extract it to the current directory
|
||||
|
@ -247,7 +247,7 @@ export LD_LIBRARY_PATH=$(pwd)/sdk/lib:$LD_LIBRARY_PATH
|
|||
For detection, you can also refer to the following code to integrate the MMDeploy SDK Python API into your own project:
|
||||
|
||||
```python
|
||||
from mmdeploy_python import Detector
|
||||
from mmdeploy_runtime import Detector
|
||||
import cv2
|
||||
|
||||
# read the image
|
||||
|
|
|
@ -9,7 +9,7 @@ from ..base import BACKEND_MANAGERS, BaseBackendManager
|
|||
|
||||
_is_available = False
|
||||
|
||||
module_name = 'mmdeploy_python'
|
||||
module_name = 'mmdeploy_runtime'
|
||||
|
||||
candidates = [
|
||||
f'../../../build/lib/{module_name}.*.so',
|
||||
|
|
|
@ -9,7 +9,7 @@ class SDKWrapper(BaseWrapper):
|
|||
|
||||
def __init__(self, model_file, task_name, device):
|
||||
super().__init__([])
|
||||
import mmdeploy_python as c_api
|
||||
import mmdeploy_runtime as c_api
|
||||
creator = getattr(c_api, task_name)
|
||||
device_id = parse_device_id(device)
|
||||
device_type = parse_device_type(device)
|
||||
|
|
|
@ -11,7 +11,7 @@ line_length = 79
|
|||
multi_line_output = 0
|
||||
extra_standard_library = setuptools
|
||||
known_first_party = mmdeploy
|
||||
known_third_party = h5py,m2r,mmcls,mmcv,mmdeploy_python,mmdet,mmedit,mmocr,mmseg,ncnn,numpy,onnx,onnxruntime,packaging,pyppeteer,pyppl,pytest,pytorch_sphinx_theme,recommonmark,setuptools,sphinx,tensorrt,torch,torchvision
|
||||
known_third_party = h5py,m2r,mmcls,mmcv,mmdeploy_runtime,mmdet,mmedit,mmocr,mmseg,ncnn,numpy,onnx,onnxruntime,packaging,pyppeteer,pyppl,pytest,pytorch_sphinx_theme,recommonmark,setuptools,sphinx,tensorrt,torch,torchvision
|
||||
no_lines_before = STDLIB,LOCALFOLDER
|
||||
default_section = THIRDPARTY
|
||||
skip = service/snpe/client/inference_pb2.py,service/snpe/client/inference_pb2_grpc.py
|
||||
|
|
|
@ -6,10 +6,10 @@ This document is going to describe the way to build MMDeploy package.
|
|||
|
||||
- Download and install Miniconda from the [official website](https://docs.conda.io/en/latest/miniconda.html).
|
||||
|
||||
- Create conda environments for python 3.6, 3.7, 3.8 and 3.9, respectively.
|
||||
- Create conda environments for python 3.6, 3.7, 3.8, 3.9 and 3.10, respectively.
|
||||
|
||||
```shell
|
||||
for PYTHON_VERSION in 3.6 3.7 3.8 3.9
|
||||
for PYTHON_VERSION in 3.6 3.7 3.8 3.9 3.10
|
||||
do
|
||||
conda create --name mmdeploy-$PYTHON_VERSION python=$PYTHON_VERSION -y
|
||||
done
|
||||
|
@ -28,20 +28,24 @@ This document is going to describe the way to build MMDeploy package.
|
|||
|
||||
```shell
|
||||
conda activate mmdeploy-3.6
|
||||
pip install pyyaml
|
||||
pip install pyyaml packaging
|
||||
cd the/root/path/of/mmdeploy
|
||||
python tools/package_tools/mmdeploy_builder.py tools/package_tools/configs/linux_x64.yaml .
|
||||
python tools/package_tools/generate_build_config.py --backend 'ort' \
|
||||
--system linux --build-mmdeploy --device cpu --build-sdk \
|
||||
--build-sdk-monolithic --build-sdk-python --sdk-dynamic-net \
|
||||
--output config.yml
|
||||
python tools/package_tools/mmdeploy_builder.py --config config.yml --output-dir pack
|
||||
```
|
||||
|
||||
You will get the precompiled packages `mmdeploy-{version}-linux-x86_64-cuda11.1-tensorrt8.2.3.0` and `mmdeploy-{version}-linux-x86_64-onnxruntime1.8.1` in the current directory if everything goes well.
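For reference, the `config.yml` written by `generate_build_config.py` is plain YAML whose top-level keys (`PLATFORM_TAG`, `BUILD_MMDEPLOY`, `BUILD_SDK_NAME`, `cmake_cfg`) come from `generate_config()` later in this diff. A minimal sketch for inspecting it before running the builder, assuming PyYAML (installed above) is available:

```python
import yaml

with open('config.yml') as f:
    cfg = yaml.safe_load(f)

print(cfg['PLATFORM_TAG'])    # e.g. 'manylinux2014_x86_64' on linux
print(cfg['BUILD_MMDEPLOY'])  # 'ON' when --build-mmdeploy was passed
print(cfg['cmake_cfg'].get('MMDEPLOY_TARGET_BACKENDS'))  # e.g. 'ort'
```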
|
||||
|
||||
- On Windows platform, open `Anaconda Powershell Prompt` from the start menu and execute:
|
||||
|
||||
```shell
|
||||
conda activate mmdeploy-3.6
|
||||
pip install pyyaml
|
||||
pip install pyyaml packaging
|
||||
cd the/root/path/of/MMDeploy
|
||||
python tools/package_tools/mmdeploy_builder.py tools/package_tools/configs/windows_x64.yaml .
|
||||
python tools/package_tools/generate_build_config.py --backend 'ort' \
|
||||
--system windows --build-mmdeploy --device cpu --build-sdk \
|
||||
--build-sdk-monolithic --build-sdk-python --sdk-dynamic-net \
|
||||
--output config.yml
|
||||
python tools/package_tools/mmdeploy_builder.py --config config.yml --output-dir pack
|
||||
```
|
||||
|
||||
When the build procedure finishes successfully, you will find `mmdeploy-{version}-windows-amd64-cuda11.1-tensorrt8.2.3.0` and `mmdeploy-{version}-windows-amd64-onnxruntime1.8.1` precompiled packages in the current directory.
|
||||
|
|
|
@ -1,13 +0,0 @@
|
|||
global_config:
|
||||
cmake_envs:
|
||||
CMAKE_CXX_COMPILER: "g++-7"
|
||||
MMDEPLOY_BUILD_SDK: "ON"
|
||||
MMDEPLOY_BUILD_SDK_PYTHON_API: "ON"
|
||||
MMDEPLOY_TARGET_DEVICES: '"cpu;cuda"'
|
||||
MMDEPLOY_TARGET_BACKENDS: "trt"
|
||||
TENSORRT_DIR: "/usr/include/aarch64-linux-gnu"
|
||||
CUDA_TOOLKIT_ROOT_DIR: "/usr/local/cuda"
|
||||
pplcv_DIR: ${pplcv_DIR}/cuda-build/install/lib/cmake/ppl
|
||||
|
||||
local_configs:
|
||||
- BUILD_NAME: "mmdeploy-{mmdeploy_v}-jetson-cuda{cuda_v}"
|
|
@ -1,22 +0,0 @@
|
|||
global_config:
|
||||
cmake_envs:
|
||||
CMAKE_CXX_COMPILER: "g++-7"
|
||||
MMDEPLOY_BUILD_SDK: "ON"
|
||||
MMDEPLOY_BUILD_SDK_MONOLITHIC: "ON"
|
||||
MMDEPLOY_SHARED_LIBS: "OFF"
|
||||
OpenCV_DIR: "${OpenCV_DIR}"
|
||||
|
||||
local_configs:
|
||||
- BUILD_NAME: "mmdeploy-{mmdeploy_v}-{system}-{machine}-onnxruntime{ort_v}"
|
||||
cmake_envs:
|
||||
MMDEPLOY_TARGET_DEVICES: '"cpu"'
|
||||
MMDEPLOY_TARGET_BACKENDS: "ort"
|
||||
ONNXRUNTIME_DIR: "${ONNXRUNTIME_DIR}"
|
||||
- BUILD_NAME: "mmdeploy-{mmdeploy_v}-{system}-{machine}-cuda{cuda_v}-tensorrt{trt_v}"
|
||||
cmake_envs:
|
||||
MMDEPLOY_TARGET_DEVICES: '"cuda"'
|
||||
MMDEPLOY_TARGET_BACKENDS: "trt"
|
||||
TENSORRT_DIR: "${TENSORRT_DIR}"
|
||||
CUDA_TOOLKIT_ROOT_DIR: "${CUDA_TOOLKIT_ROOT_DIR}"
|
||||
CUDNN_DIR: "${CUDNN_DIR}"
|
||||
pplcv_DIR: ${pplcv_DIR}/cuda-build/install/lib/cmake/ppl
|
|
@ -1,22 +0,0 @@
|
|||
global_config:
|
||||
cmake_flags: ['-A x64 -T v142']
|
||||
cmake_envs:
|
||||
MMDEPLOY_BUILD_SDK: "ON"
|
||||
MMDEPLOY_BUILD_SDK_MONOLITHIC: "ON"
|
||||
MMDEPLOY_SHARED_LIBS: "OFF"
|
||||
MMDEPLOY_CODEBASES: "all"
|
||||
OpenCV_DIR: "%OpenCV_DIR%"
|
||||
|
||||
local_configs:
|
||||
- BUILD_NAME: "mmdeploy-{mmdeploy_v}-{system}-{machine}-onnxruntime{ort_v}"
|
||||
cmake_envs:
|
||||
MMDEPLOY_TARGET_DEVICES: '"cpu"'
|
||||
MMDEPLOY_TARGET_BACKENDS: "ort"
|
||||
ONNXRUNTIME_DIR: "%ONNXRUNTIME_DIR%"
|
||||
- BUILD_NAME: "mmdeploy-{mmdeploy_v}-{system}-{machine}-cuda{cuda_v}-tensorrt{trt_v}"
|
||||
cmake_envs:
|
||||
MMDEPLOY_TARGET_DEVICES: '"cuda"'
|
||||
MMDEPLOY_TARGET_BACKENDS: "trt"
|
||||
pplcv_DIR: "%PPLCV_DIR%\\pplcv-build\\install\\lib\\cmake\\ppl"
|
||||
TENSORRT_DIR: "%TENSORRT_DIR%"
|
||||
CUDNN_DIR: "%CUDNN_DIR%"
|
|
@ -0,0 +1,172 @@
|
|||
# Copyright (c) OpenMMLab. All rights reserved.
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
from distutils.util import get_platform
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
def parse_arguments():
|
||||
parser = argparse.ArgumentParser(
|
||||
description='MMDeploy create build config')
|
||||
parser.add_argument(
|
||||
'--backend',
|
||||
required=True,
|
||||
type=str,
|
||||
help='target backend. Eg: "ort;trt"')
|
||||
parser.add_argument(
|
||||
'--system',
|
||||
required=True,
|
||||
type=str,
|
||||
help='target system, Eg: windows/linux/jetson')
|
||||
parser.add_argument(
|
||||
'--build-mmdeploy',
|
||||
action='store_true',
|
||||
help='whether build mmdeploy runtime package')
|
||||
parser.add_argument(
|
||||
'--build-sdk', action='store_true', help='whether build sdk c/cpp api')
|
||||
parser.add_argument(
|
||||
'--sdk-dynamic-net',
|
||||
action='store_true',
|
||||
help='whether build mmdeploy sdk dynamic net')
|
||||
parser.add_argument('--device', type=str, help='target device. Eg: "cpu"')
|
||||
parser.add_argument(
|
||||
'--shared', action='store_true', help='whether build shared lib')
|
||||
parser.add_argument(
|
||||
'--build-sdk-monolithic',
|
||||
action='store_true',
|
||||
help='whether build sdk monolithic')
|
||||
parser.add_argument(
|
||||
'--build-sdk-python',
|
||||
action='store_true',
|
||||
help='whether build sdk python api')
|
||||
parser.add_argument(
|
||||
'--opencv-dir',
|
||||
type=str,
|
||||
help='opencv path that contains OpenCVConfig.cmake, '
|
||||
'default use $ENV{OpenCV_DIR}')
|
||||
parser.add_argument(
|
||||
'--pplcv-dir',
|
||||
type=str,
|
||||
help='pplcv path that contains pplcv-config.cmake, '
|
||||
'default use $ENV{pplcv_DIR}')
|
||||
parser.add_argument(
|
||||
'--onnxruntime-dir',
|
||||
type=str,
|
||||
help='onnxruntime root path, default use $ENV{ONNXRUNTIME_DIR}')
|
||||
parser.add_argument(
|
||||
'--tensorrt-dir',
|
||||
type=str,
|
||||
help='tensorrt root path, default use $ENV{TENSORRT_DIR}')
|
||||
parser.add_argument(
|
||||
'--cudnn-dir',
|
||||
type=str,
|
||||
help='cudnn root dir, default use $ENV{CUDNN_DIR}')
|
||||
parser.add_argument(
|
||||
'--output', required=True, type=str, help='output config file path')
|
||||
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def generate_config(args):
|
||||
config = {}
|
||||
cmake_cfg = {}
|
||||
|
||||
# wheel platform tag
|
||||
if args.system in ['linux', 'jetson']:
|
||||
config['PLATFORM_TAG'] = 'manylinux2014_x86_64'
|
||||
else:
|
||||
config['PLATFORM_TAG'] = get_platform().replace('-',
|
||||
'_').replace('.', '_')
|
||||
|
||||
config['BUILD_MMDEPLOY'] = 'ON' if args.build_mmdeploy else 'OFF'
|
||||
|
||||
# deps for mmdeploy
|
||||
cmake_cfg['MMDEPLOY_TARGET_BACKENDS'] = args.backend
|
||||
if 'ort' in args.backend:
|
||||
if args.onnxruntime_dir:
|
||||
cmake_cfg['ONNXRUNTIME_DIR'] = args.onnxruntime_dir
|
||||
elif 'ONNXRUNTIME_DIR' in os.environ:
|
||||
cmake_cfg['ONNXRUNTIME_DIR'] = os.environ['ONNXRUNTIME_DIR']
|
||||
else:
|
||||
raise Exception('please provide --onnxruntime-dir')
|
||||
if 'trt' in args.backend:
|
||||
if args.tensorrt_dir:
|
||||
cmake_cfg['TENSORRT_DIR'] = args.tensorrt_dir
|
||||
elif 'TENSORRT_DIR' in os.environ:
|
||||
cmake_cfg['TENSORRT_DIR'] = os.environ['TENSORRT_DIR']
|
||||
else:
|
||||
raise Exception('please provide --tensorrt-dir')
|
||||
|
||||
if args.cudnn_dir:
|
||||
cmake_cfg['CUDNN_DIR'] = args.cudnn_dir
|
||||
elif 'CUDNN_DIR' in os.environ:
|
||||
cmake_cfg['CUDNN_DIR'] = os.environ['CUDNN_DIR']
|
||||
else:
|
||||
raise Exception('please provide --cudnn-dir')
|
||||
|
||||
# deps for mmdeploy-python
|
||||
if args.build_sdk:
|
||||
cmake_cfg['MMDEPLOY_BUILD_SDK'] = 'ON'
|
||||
cmake_cfg[
|
||||
'MMDEPLOY_BUILD_SDK_MONOLITHIC'] = 'ON' \
|
||||
if args.build_sdk_monolithic else 'OFF'
|
||||
cmake_cfg[
|
||||
'MMDEPLOY_BUILD_SDK_PYTHON_API'] = 'ON' \
|
||||
if args.build_sdk_python else 'OFF'
|
||||
cmake_cfg['MMDEPLOY_SHARED_LIBS'] = 'ON' if args.shared else 'OFF'
|
||||
cmake_cfg['MMDEPLOY_TARGET_DEVICES'] = args.device
|
||||
cmake_cfg[
|
||||
'MMDEPLOY_DYNAMIC_BACKEND'] = 'ON' \
|
||||
if args.sdk_dynamic_net else 'OFF'
|
||||
|
||||
if args.opencv_dir:
|
||||
cmake_cfg['OpenCV_DIR'] = args.opencv_dir
|
||||
elif 'OpenCV_DIR' in os.environ:
|
||||
cmake_cfg['OpenCV_DIR'] = os.environ['OpenCV_DIR']
|
||||
else:
|
||||
raise Exception('please provide --opencv-dir')
|
||||
|
||||
if args.device == 'cuda':
|
||||
if args.pplcv_dir:
|
||||
cmake_cfg['pplcv_DIR'] = args.pplcv_dir
|
||||
elif 'pplcv_DIR' in os.environ:
|
||||
cmake_cfg['pplcv_DIR'] = os.environ['pplcv_DIR']
|
||||
else:
|
||||
raise Exception('please provide --pplcv-dir')
|
||||
|
||||
# sdk package template
|
||||
if args.system in ['windows', 'linux']:
|
||||
name = 'mmdeploy-{mmdeploy_v}-{system}-{machine}'
|
||||
if args.device == 'cpu':
|
||||
name = '{}-cpu'.format(name)
|
||||
elif args.device == 'cuda':
|
||||
name = '{}-cuda'.format(name) + '{cuda_v}'
|
||||
else:
|
||||
raise Exception('unsupported device')
|
||||
config['BUILD_SDK_NAME'] = name
|
||||
elif args.system == 'jetson':
|
||||
config['BUILD_SDK_NAME'] = 'mmdeploy-{mmdeploy_v}-jetson-{machine}'
|
||||
else:
|
||||
raise Exception('unsupported system')
|
||||
else:
|
||||
cmake_cfg['MMDEPLOY_BUILD_SDK'] = 'OFF'
|
||||
cmake_cfg['MMDEPLOY_BUILD_SDK_PYTHON_API'] = 'OFF'
|
||||
|
||||
config['cmake_cfg'] = cmake_cfg
|
||||
return config
|
||||
|
||||
|
||||
def main():
|
||||
# Parse arguments
|
||||
args = parse_arguments()
|
||||
print(args)
|
||||
|
||||
config = generate_config(args)
|
||||
with open(args.output, 'w') as f:
|
||||
yaml.dump(config, f)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
|
@ -8,10 +8,8 @@ import platform
|
|||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import tarfile
|
||||
from distutils.util import get_platform
|
||||
from glob import glob
|
||||
from subprocess import CalledProcessError, check_output, run
|
||||
from subprocess import check_output, run
|
||||
from typing import Dict
|
||||
|
||||
import yaml
|
||||
|
@ -20,9 +18,9 @@ from packaging import version
|
|||
logger = logging.getLogger()
|
||||
logger.setLevel(logging.INFO)
|
||||
|
||||
CUR_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
CUR_DIR = osp.dirname(osp.abspath(__file__))
|
||||
MMDEPLOY_DIR = osp.abspath(osp.join(CUR_DIR, '../..'))
|
||||
PACKAGING_DIR = osp.join(CUR_DIR, 'packaging')
|
||||
PLATFORM_TAG = get_platform().replace('-', '_').replace('.', '_')
|
||||
|
||||
|
||||
def get_version(version_file):
|
||||
|
@ -31,23 +29,15 @@ def get_version(version_file):
|
|||
return locals()['__version__']
|
||||
|
||||
|
||||
def _merge_cfg(cfg0, cfg1):
|
||||
cfg = copy.deepcopy(cfg0)
|
||||
for k, v in cfg1.items():
|
||||
if k in cfg:
|
||||
cfg[k] = _merge_cfg(cfg0[k], cfg1[k])
|
||||
else:
|
||||
cfg[k] = v
|
||||
return cfg
|
||||
|
||||
|
||||
def _remove_if_exist(path):
|
||||
if osp.exists(path):
|
||||
logging.info(f'Remove path: {path}.')
|
||||
logging.info(f'Remove path: {path}')
|
||||
if osp.isdir(path):
|
||||
shutil.rmtree(path)
|
||||
else:
|
||||
os.remove(path)
|
||||
if osp.islink(path):
|
||||
os.remove(path)
|
||||
|
||||
|
||||
def _copy(src_path, dst_path):
|
||||
|
@ -57,7 +47,7 @@ def _copy(src_path, dst_path):
|
|||
if osp.isdir(src_path):
|
||||
if osp.exists(dst_path):
|
||||
shutil.rmtree(dst_path)
|
||||
shutil.copytree(src_path, dst_path)
|
||||
shutil.copytree(src_path, dst_path, symlinks=True)
|
||||
else:
|
||||
shutil.copy(src_path, dst_path)
|
||||
|
||||
|
@ -78,12 +68,6 @@ def _call_command(cmd, cwd, stdout=None, stderr=None):
|
|||
exit(-1)
|
||||
|
||||
|
||||
def _create_tar(path, tar_name):
|
||||
logging.info(f'create tar file: {tar_name}')
|
||||
with tarfile.open(tar_name, 'w:gz') as tar:
|
||||
tar.add(path, arcname=os.path.basename(path))
|
||||
|
||||
|
||||
def _create_bdist_cmd(cfg, c_ext=False, dist_dir=None):
|
||||
|
||||
bdist_tags = cfg.get('bdist_tags', {})
|
||||
|
@ -92,7 +76,7 @@ def _create_bdist_cmd(cfg, c_ext=False, dist_dir=None):
|
|||
bdist_cmd = 'python setup.py bdist_wheel '
|
||||
|
||||
# platform
|
||||
bdist_cmd += f' --plat-name {PLATFORM_TAG} '
|
||||
bdist_cmd += f' --plat-name {cfg["PLATFORM_TAG"]} '
|
||||
|
||||
# python tag
|
||||
python_tag = f'cp{sys.version_info.major}{sys.version_info.minor}'\
|
||||
|
@ -108,11 +92,11 @@ def _create_bdist_cmd(cfg, c_ext=False, dist_dir=None):
|
|||
return bdist_cmd
|
||||
|
||||
|
||||
def clear_mmdeploy(mmdeploy_dir: str):
|
||||
logging.info(f'cleaning mmdeploy: {mmdeploy_dir}')
|
||||
def clear_mmdeploy():
|
||||
logging.info(f'Cleaning mmdeploy: {MMDEPLOY_DIR}')
|
||||
|
||||
def _remove_in_mmdeploy(path):
|
||||
remove_dir = osp.join(mmdeploy_dir, path)
|
||||
remove_dir = osp.join(MMDEPLOY_DIR, path)
|
||||
_remove_if_exist(remove_dir)
|
||||
|
||||
# remove build file
|
||||
|
@ -130,98 +114,21 @@ def clear_mmdeploy(mmdeploy_dir: str):
|
|||
_remove_in_mmdeploy('mmdeploy/backend/ncnn/mmdeploy_onnx2ncnn')
|
||||
_remove_in_mmdeploy('mmdeploy/backend/ncnn/mmdeploy_onnx2ncnn.exe')
|
||||
ncnn_ext_paths = glob(
|
||||
osp.join(mmdeploy_dir, 'mmdeploy/backend/ncnn/ncnn_ext.*'))
|
||||
osp.join(MMDEPLOY_DIR, 'mmdeploy/backend/ncnn/ncnn_ext.*'))
|
||||
for ncnn_ext_path in ncnn_ext_paths:
|
||||
os.remove(ncnn_ext_path)
|
||||
|
||||
# remove ts_optmizer
|
||||
ts_optimizer_paths = glob(
|
||||
osp.join(mmdeploy_dir, 'mmdeploy/backend/torchscript/ts_optimizer.*'))
|
||||
osp.join(MMDEPLOY_DIR, 'mmdeploy/backend/torchscript/ts_optimizer.*'))
|
||||
for ts_optimizer_path in ts_optimizer_paths:
|
||||
os.remove(ts_optimizer_path)
|
||||
|
||||
|
||||
def build_mmdeploy(cfg, mmdeploy_dir, dist_dir=None):
|
||||
cmake_flags = cfg.get('cmake_flags', [])
|
||||
cmake_envs = cfg.get('cmake_envs', dict())
|
||||
|
||||
args = [f'-D{k}={v}' for k, v in cmake_envs.items()]
|
||||
|
||||
# clear mmdeploy
|
||||
clear_mmdeploy(mmdeploy_dir)
|
||||
|
||||
build_dir = osp.join(mmdeploy_dir, 'build')
|
||||
if not osp.exists(build_dir):
|
||||
os.mkdir(build_dir)
|
||||
|
||||
# cmake cmd
|
||||
cmake_cmd = ' '.join(['cmake ..'] + cmake_flags + args)
|
||||
_call_command(cmake_cmd, build_dir)
|
||||
|
||||
if sys.platform == 'win32':
|
||||
# build cmd
|
||||
build_cmd = 'cmake --build . --config Release -- /m'
|
||||
_call_command(build_cmd, build_dir)
|
||||
install_cmd = 'cmake --install . --config Release'
|
||||
_call_command(install_cmd, build_dir)
|
||||
_remove_if_exist(osp.join(build_dir, 'lib', 'Release'))
|
||||
else:
|
||||
# build cmd
|
||||
build_cmd = 'cmake --build . -- -j$(nproc) && cmake --install .'
|
||||
_call_command(build_cmd, build_dir)
|
||||
|
||||
# build wheel
|
||||
bdist_cmd = _create_bdist_cmd(cfg, c_ext=False, dist_dir=dist_dir)
|
||||
_call_command(bdist_cmd, mmdeploy_dir)
|
||||
|
||||
|
||||
def build_mmdeploy_python(python_executable, cfg, mmdeploy_dir):
|
||||
cmake_flags = cfg.get('cmake_flags', [])
|
||||
cmake_envs = cfg.get('cmake_envs', dict())
|
||||
|
||||
args = [f'-D{k}={v}' for k, v in cmake_envs.items()]
|
||||
args.append(
|
||||
f'-DMMDeploy_DIR={mmdeploy_dir}/build/install/lib/cmake/MMDeploy')
|
||||
args.append(f'-DPYTHON_EXECUTABLE={python_executable}')
|
||||
|
||||
if sys.platform == 'win32':
|
||||
build_cmd = 'cmake --build . --config Release -- /m'
|
||||
pass
|
||||
else:
|
||||
build_cmd = 'cmake --build . -- -j$(nproc)'
|
||||
cmake_cmd = ' '.join(['cmake ../csrc/mmdeploy/apis/python'] + cmake_flags +
|
||||
args)
|
||||
|
||||
build_dir = osp.join(mmdeploy_dir, 'build_python')
|
||||
_remove_if_exist(build_dir)
|
||||
os.mkdir(build_dir)
|
||||
|
||||
_call_command(cmake_cmd, build_dir)
|
||||
_call_command(build_cmd, build_dir)
|
||||
|
||||
python_api_lib_path = []
|
||||
lib_patterns = ['*mmdeploy_python*.so', '*mmdeploy_python*.pyd']
|
||||
for pattern in lib_patterns:
|
||||
python_api_lib_path.extend(
|
||||
glob(
|
||||
osp.join(mmdeploy_dir, 'build_python/**', pattern),
|
||||
recursive=True,
|
||||
))
|
||||
return python_api_lib_path[0]
|
||||
|
||||
|
||||
def get_dir_name(cfg, tag, default_name):
|
||||
if tag not in cfg:
|
||||
logging.warning(f'{tag} not found, use `{default_name}` as default.')
|
||||
else:
|
||||
default_name = cfg[tag]
|
||||
return cfg, default_name
|
||||
|
||||
|
||||
def check_env(cfg: Dict):
|
||||
env_info = {}
|
||||
|
||||
cmake_envs = cfg.get('cmake_envs', dict())
|
||||
cmake_envs = cfg.get('cmake_cfg', dict())
|
||||
|
||||
# system
|
||||
platform_system = platform.system().lower()
|
||||
|
@ -232,7 +139,9 @@ def check_env(cfg: Dict):
|
|||
# CUDA version
|
||||
cuda_version = 'unknown'
|
||||
|
||||
CUDA_TOOLKIT_ROOT_DIR = cmake_envs.get('CUDA_TOOLKIT_ROOT_DIR', '')
|
||||
CUDA_TOOLKIT_ROOT_DIR = os.environ.get('CUDA_TOOLKIT_ROOT_DIR', '')
|
||||
CUDA_TOOLKIT_ROOT_DIR = cmake_envs.get('CUDA_TOOLKIT_ROOT_DIR',
|
||||
CUDA_TOOLKIT_ROOT_DIR)
|
||||
CUDA_TOOLKIT_ROOT_DIR = osp.expandvars(CUDA_TOOLKIT_ROOT_DIR)
|
||||
nvcc_cmd = ('nvcc' if len(CUDA_TOOLKIT_ROOT_DIR) <= 0 else osp.join(
|
||||
CUDA_TOOLKIT_ROOT_DIR, 'bin', 'nvcc'))
|
||||
|
@ -278,7 +187,8 @@ def check_env(cfg: Dict):
|
|||
minor = re.search(r'#define NV_TENSORRT_MINOR (\d+)', data)
|
||||
patch = re.search(r'#define NV_TENSORRT_PATCH (\d+)', data)
|
||||
build = re.search(r'#define NV_TENSORRT_BUILD (\d+)', data)
|
||||
if major is not None and minor is not None and patch is not None:
|
||||
if major is not None and minor is not None and patch is not None \
|
||||
and build is not None:
|
||||
tensorrt_version = (f'{major.group(1)}.' +
|
||||
f'{minor.group(1)}.' +
|
||||
f'{patch.group(1)}.' + f'{build.group(1)}')
|
||||
|
@ -288,97 +198,266 @@ def check_env(cfg: Dict):
|
|||
return env_info
|
||||
|
||||
|
||||
def create_package(cfg: Dict, mmdeploy_dir: str):
|
||||
build_dir = 'build'
|
||||
sdk_tar_name = 'sdk'
|
||||
def build_mmdeploy(cfg: Dict):
|
||||
build_dir = osp.join(MMDEPLOY_DIR, 'build')
|
||||
if not osp.exists(build_dir):
|
||||
os.mkdir(build_dir)
|
||||
|
||||
# load flags
|
||||
cfg, build_dir = get_dir_name(cfg, 'BUILD_NAME', build_dir)
|
||||
cmake_envs = cfg.get('cmake_envs', dict())
|
||||
build_sdk_flag = cmake_envs.get('MMDEPLOY_BUILD_SDK', 'OFF')
|
||||
if 'TAR_NAME' in cfg:
|
||||
cfg, sdk_tar_name = get_dir_name(cfg, 'TAR_NAME', sdk_tar_name)
|
||||
cmake_cfg = cfg['cmake_cfg']
|
||||
cmake_options = [f'-D{k}="{v}"' for k, v in cmake_cfg.items() if v != '']
|
||||
if sys.platform == 'win32':
|
||||
cmake_windows_options = '-A x64 -T v142'
|
||||
if 'CUDA_PATH' in os.environ:
|
||||
cmake_windows_options += ',cuda="%CUDA_PATH%"'
|
||||
cmake_options = [cmake_windows_options] + cmake_options
|
||||
|
||||
# fill name
|
||||
# configure
|
||||
cmake_cmd = ' '.join(['cmake ..'] + cmake_options)
|
||||
_call_command(cmake_cmd, build_dir)
|
||||
# build
|
||||
if sys.platform == 'win32':
|
||||
build_cmd = 'cmake --build . --config Release -- /m'
|
||||
else:
|
||||
build_cmd = 'cmake --build . -- -j$(nproc)'
|
||||
_call_command(build_cmd, build_dir)
|
||||
# install
|
||||
install_cmd = 'cmake --install . --config Release'
|
||||
_call_command(install_cmd, build_dir)
|
||||
|
||||
|
||||
def copy_thirdparty(cfg: Dict, sdk_path: str):
|
||||
thirdparty_dir = osp.join(sdk_path, 'thirdparty')
|
||||
os.mkdir(thirdparty_dir)
|
||||
|
||||
def _copy_needed(src_dir, dst_dir, needed):
|
||||
if not osp.exists(dst_dir):
|
||||
os.makedirs(dst_dir)
|
||||
for path in needed:
|
||||
src_path = osp.join(src_dir, path[0])
|
||||
dst_path = osp.join(dst_dir, path[0])
|
||||
_copy(src_path, dst_path)
|
||||
if len(path) == 1 or path[1] == '**':
|
||||
continue
|
||||
|
||||
old_dir = os.getcwd()
|
||||
os.chdir(dst_path)
|
||||
files = glob('**', recursive=True)
|
||||
reserve = []
|
||||
for pattern in path[1:]:
|
||||
reserve.extend(glob(pattern, recursive=True))
|
||||
|
||||
for file in files:
|
||||
if file not in reserve:
|
||||
_remove_if_exist(file)
|
||||
os.chdir(old_dir)
|
||||
|
||||
# copy onnxruntime, tensorrt
|
||||
backend = cfg['cmake_cfg']['MMDEPLOY_TARGET_BACKENDS']
|
||||
if 'ort' in backend:
|
||||
src_dir = cfg['cmake_cfg']['ONNXRUNTIME_DIR']
|
||||
dst_dir = osp.join(thirdparty_dir, 'onnxruntime')
|
||||
needed = [('include', '**'), ('lib', '**')]
|
||||
_copy_needed(src_dir, dst_dir, needed)
|
||||
if 'trt' in backend:
|
||||
src_dir = cfg['cmake_cfg']['TENSORRT_DIR']
|
||||
dst_dir = osp.join(thirdparty_dir, 'tensorrt')
|
||||
needed = [('include', '**'),
|
||||
('lib', 'libnvinfer_builder_resource.so*', 'libnvinfer.so*',
|
||||
'libnvinfer_plugin.so*', 'nvinfer_builder_resource.*',
|
||||
'nvinfer*', 'nvinfer_plugin*')]
|
||||
_copy_needed(src_dir, dst_dir, needed)
|
||||
|
||||
|
||||
def copy_scripts(sdk_path: str):
|
||||
scripts_base = osp.join(MMDEPLOY_DIR, 'tools', 'package_tools', 'scripts')
|
||||
if sys.platform == 'win32':
|
||||
src_dir = osp.join(scripts_base, 'windows')
|
||||
elif sys.platform == 'linux':
|
||||
src_dir = osp.join(scripts_base, 'linux')
|
||||
else:
|
||||
raise Exception('unsupported')
|
||||
files = glob(osp.join(src_dir, '*'))
|
||||
for file in files:
|
||||
filename = osp.basename(file)
|
||||
src_path = osp.join(src_dir, filename)
|
||||
dst_path = osp.join(sdk_path, filename)
|
||||
_copy(src_path, dst_path)
|
||||
|
||||
|
||||
def copy_onnxruntime(cfg, dst_dir):
|
||||
ort_root = cfg['cmake_cfg']['ONNXRUNTIME_DIR']
|
||||
patterns = ['libonnxruntime.so.*', 'onnxruntime.dll']
|
||||
for pattern in patterns:
|
||||
src_lib = glob(osp.join(ort_root, 'lib', pattern))
|
||||
if len(src_lib) > 0:
|
||||
dst_lib = osp.join(dst_dir, osp.basename(src_lib[0]))
|
||||
_copy(src_lib[0], dst_lib)
|
||||
|
||||
|
||||
def create_mmdeploy(cfg: Dict, work_dir: str):
|
||||
if cfg['BUILD_MMDEPLOY'] == 'OFF':
|
||||
logging.info('Skip build mmdeploy package')
|
||||
return
|
||||
|
||||
dist_dir = osp.join(work_dir, 'mmdeploy')
|
||||
if osp.exists(dist_dir):
|
||||
logging.info('mmdeploy existed, deleting...')
|
||||
shutil.rmtree(dist_dir)
|
||||
|
||||
clear_mmdeploy()
|
||||
build_mmdeploy(cfg)
|
||||
|
||||
# copy libonnxruntime.so.x.y.z
|
||||
backend = cfg['cmake_cfg']['MMDEPLOY_TARGET_BACKENDS']
|
||||
if 'ort' in backend:
|
||||
dst_dir = osp.join(MMDEPLOY_DIR, 'mmdeploy', 'lib')
|
||||
copy_onnxruntime(cfg, dst_dir)
|
||||
|
||||
# build wheel
|
||||
build_dir = osp.join(MMDEPLOY_DIR, 'build')
|
||||
_remove_if_exist(osp.join(build_dir, 'lib'))
|
||||
_remove_if_exist(osp.join(build_dir, 'lib', 'Release'))
|
||||
bdist_cmd = _create_bdist_cmd(cfg, c_ext=False, dist_dir=dist_dir)
|
||||
_call_command(bdist_cmd, MMDEPLOY_DIR)
|
||||
|
||||
|
||||
def create_mmdeploy_runtime(cfg: Dict, work_dir: str):
    cmake_cfg = cfg['cmake_cfg']
    if cmake_cfg['MMDEPLOY_BUILD_SDK'] == 'OFF' or \
            cmake_cfg['MMDEPLOY_BUILD_SDK_PYTHON_API'] == 'OFF':
        logging.info('Skip build mmdeploy sdk python api')
        return

    for python_version in ['3.6', '3.7', '3.8', '3.9', '3.10']:
        _version = version.parse(python_version)
        python_major = _version.major
        python_minor = _version.minor
        # create sdk python api wheel
        sdk_python_package_dir = osp.join(work_dir, '.mmdeploy_runtime')
        _copy(PACKAGING_DIR, sdk_python_package_dir)
        _copy(
            osp.join(MMDEPLOY_DIR, 'mmdeploy', 'version.py'),
            osp.join(sdk_python_package_dir, 'mmdeploy_runtime', 'version.py'),
        )

        # build mmdeploy_runtime
        python_executable = shutil.which('python')\
            .replace('mmdeploy-3.6', f'mmdeploy-{python_version}')
        cmake_options = [
            f'-D{k}="{v}"' for k, v in cmake_cfg.items() if v != ''
        ]
        cmake_options.append(
            f'-DMMDeploy_DIR={MMDEPLOY_DIR}/build/install/lib/cmake/MMDeploy')
        cmake_options.append(f'-DPYTHON_EXECUTABLE={python_executable}')
        if sys.platform == 'win32':
            cmake_options.append('-A x64 -T v142')
            if 'CUDA_PATH' in os.environ:
                cmake_options[-1] += ',cuda="%CUDA_PATH%"'
        cmake_cmd = ' '.join(['cmake ../csrc/mmdeploy/apis/python'] +
                             cmake_options)
        build_dir = osp.join(MMDEPLOY_DIR, 'build_python')
        _remove_if_exist(build_dir)
        os.mkdir(build_dir)
        _call_command(cmake_cmd, build_dir)
        if sys.platform == 'win32':
            build_cmd = 'cmake --build . --config Release -- /m'
        else:
            build_cmd = 'cmake --build . -- -j$(nproc)'
        _call_command(build_cmd, build_dir)

        # copy api lib
        python_api_lib_path = []
        lib_patterns = ['*mmdeploy_runtime*.so', '*mmdeploy_runtime*.pyd']
        for pattern in lib_patterns:
            python_api_lib_path.extend(
                glob(
                    osp.join(MMDEPLOY_DIR, 'build_python/**', pattern),
                    recursive=True,
                ))
        _copy(
            python_api_lib_path[0],
            osp.join(sdk_python_package_dir, 'mmdeploy_runtime'),
        )
        _remove_if_exist(osp.join(MMDEPLOY_DIR, 'build_python'))

        # copy net & mmdeploy
        if sys.platform == 'win32':
            libs_to_copy = ['*net.dll', 'mmdeploy.dll']
            search_dir = osp.join(MMDEPLOY_DIR, 'build', 'install', 'bin')
        elif sys.platform == 'linux':
            libs_to_copy = ['*net.so', '*mmdeploy.so.0']
            search_dir = osp.join(MMDEPLOY_DIR, 'build', 'install', 'lib')
        else:
            raise Exception('unsupported')

        for pattern in libs_to_copy:
            files = glob(osp.join(search_dir, pattern))
            for file in files:
                _copy(file, osp.join(sdk_python_package_dir,
                                     'mmdeploy_runtime'))

        # copy onnxruntime
        if 'ort' in cfg['cmake_cfg']['MMDEPLOY_TARGET_BACKENDS']:
            copy_onnxruntime(
                cfg, osp.join(sdk_python_package_dir, 'mmdeploy_runtime'))

        # bdist
        sdk_wheel_dir = osp.join(work_dir, 'mmdeploy_runtime')
        cfg['bdist_tags'] = {'python_tag': f'cp{python_major}{python_minor}'}
        bdist_cmd = _create_bdist_cmd(cfg, c_ext=True, dist_dir=sdk_wheel_dir)
        if 'cuda' in cmake_cfg['MMDEPLOY_TARGET_DEVICES']:
            bdist_cmd += ' --use-gpu'
        _call_command(bdist_cmd, '.mmdeploy_runtime')
        _remove_if_exist(sdk_python_package_dir)


def create_sdk(cfg: Dict, work_dir: str):
    cmake_cfg = cfg['cmake_cfg']
    if cmake_cfg['MMDEPLOY_BUILD_SDK'] == 'OFF':
        logging.info('Skip build mmdeploy sdk')
        return

    cfg = copy.deepcopy(cfg)
    cfg['cmake_cfg']['MMDEPLOY_BUILD_SDK_PYTHON_API'] = 'OFF'
    clear_mmdeploy()
    build_mmdeploy(cfg)

    sdk_root = osp.abspath(osp.join(work_dir, 'sdk'))
    build_sdk_name = cfg['BUILD_SDK_NAME']
    env_info = check_env(cfg)
-    version_file = osp.join(mmdeploy_dir, 'mmdeploy', 'version.py')
+    version_file = osp.join(MMDEPLOY_DIR, 'mmdeploy', 'version.py')
    mmdeploy_version = get_version(version_file)
-    build_dir = build_dir.format(mmdeploy_v=mmdeploy_version, **env_info)
+    build_sdk_name = build_sdk_name.format(
+        mmdeploy_v=mmdeploy_version, **env_info)
    sdk_path = osp.join(sdk_root, build_sdk_name)

    # create package directory.
-    if osp.exists(build_dir):
-        logging.info(f'{build_dir} existed, deleting...')
-        shutil.rmtree(build_dir)
-    os.mkdir(build_dir)
+    if osp.exists(sdk_path):
+        logging.info(f'{sdk_path}, deleting...')
+        shutil.rmtree(sdk_path)
+    os.makedirs(sdk_path)

-    logging.info(f'build mmdeploy in {build_dir}:')
    logging.debug(f'with config: {cfg}')
+    install_dir = osp.join(MMDEPLOY_DIR, 'build/install/')
+    _copy(install_dir, sdk_path)
+    _copy(f'{MMDEPLOY_DIR}/demo/python', f'{sdk_path}/example/python')
+    _remove_if_exist(osp.join(sdk_path, 'example', 'build'))

-    try:
-        # build dist
-        dist_dir = osp.join(build_dir, 'dist')
-        build_mmdeploy(cfg, mmdeploy_dir, dist_dir=dist_dir)
+    # copy thirdparty
+    copy_thirdparty(cfg, sdk_path)
+    # copy scripts
+    copy_scripts(sdk_path)

-        if build_sdk_flag == 'ON':

-            sdk_tar_dir = osp.join(build_dir, sdk_tar_name)

-            # copy lib and install into sdk dir
-            install_dir = osp.join(mmdeploy_dir, 'build/install/')
-            _copy(install_dir, sdk_tar_dir)
-            _copy(f'{mmdeploy_dir}/demo/python',
-                  f'{sdk_tar_dir}/example/python')
-            _remove_if_exist(osp.join(sdk_tar_dir, 'example', 'build'))

-            # build SDK Python API according to different python version
-            for python_version in ['3.6', '3.7', '3.8', '3.9']:
-                _version = version.parse(python_version)
-                python_major, python_minor = _version.major, _version.minor

-                # create sdk python api wheel
-                sdk_python_package_dir = osp.join(build_dir,
-                                                  '.mmdeploy_python')
-                _copy(PACKAGING_DIR, sdk_python_package_dir)
-                _copy(
-                    osp.join(mmdeploy_dir, 'mmdeploy', 'version.py'),
-                    osp.join(sdk_python_package_dir, 'mmdeploy_python',
-                             'version.py'),
-                )

-                # build mmdeploy sdk python api
-                python_executable = shutil.which('python')\
-                    .replace('mmdeploy-3.6', f'mmdeploy-{python_version}')
-                python_api_lib_path = build_mmdeploy_python(
-                    python_executable, cfg, mmdeploy_dir)
-                _copy(
-                    python_api_lib_path,
-                    osp.join(sdk_python_package_dir, 'mmdeploy_python'),
-                )
-                _remove_if_exist(osp.join(mmdeploy_dir, 'build_python'))

-                sdk_wheel_dir = osp.abspath(osp.join(sdk_tar_dir, 'python'))

-                bdist_cmd = (f'{python_executable} '
-                             f'setup.py bdist_wheel --plat-name '
-                             f'{PLATFORM_TAG} --python-tag '
-                             f'cp{python_major}{python_minor} '
-                             f'--dist-dir {sdk_wheel_dir}')
-                _call_command(bdist_cmd, sdk_python_package_dir)

-                # remove temp package dir
-                _remove_if_exist(sdk_python_package_dir)

-        logging.info('build finish.')

-    except CalledProcessError:
-        logging.error('build failed')
-        exit(-1)


def create_package(cfg: Dict, work_dir: str):
    create_mmdeploy(cfg, work_dir)
    create_sdk(cfg, work_dir)
    create_mmdeploy_runtime(cfg, work_dir)


def parse_args():
    parser = argparse.ArgumentParser(description='Build mmdeploy from yaml.')
-    parser.add_argument('build_cfgs', help='The build config yaml file.')
-    parser.add_argument('mmdeploy_dir', help='The source code of MMDeploy.')
+    parser.add_argument('--config', help='The build config yaml file.')
+    parser.add_argument(
+        '--output-dir', default='.', help='Output package directory.')
    args = parser.parse_args()

    return args

@@ -386,28 +465,18 @@ def parse_args():

def parse_configs(cfg_path: str):
    with open(cfg_path, mode='r') as f:
-        cfgs = yaml.load(f, yaml.Loader)

-    global_cfg = cfgs.get('global_config', dict())
-    local_cfgs = cfgs.get('local_configs', [])

-    merged_cfgs = [
-        _merge_cfg(global_cfg, local_cfg) for local_cfg in local_cfgs
-    ]

-    return merged_cfgs
+        config = yaml.load(f, yaml.Loader)
+    logging.info(f'Load config\n{yaml.dump(config)}')
+    return config


def main():
    args = parse_args()
-    cfgs = parse_configs(args.build_cfgs)
-    mmdeploy_dir = osp.abspath(args.mmdeploy_dir)
-    logging.info(f'Using mmdeploy_dir: {mmdeploy_dir}')

    logging.info(f'Using PACKAGING_DIR: {PACKAGING_DIR}')

-    for cfg in cfgs:
-        create_package(cfg, mmdeploy_dir)
+    cfg = parse_configs(args.config)
+    work_dir = osp.abspath(args.output_dir)
+    logging.info(f'Using mmdeploy_dir: {MMDEPLOY_DIR}')
+    logging.info(f'Using output_dir: {work_dir}')
+    create_package(cfg, work_dir)


if __name__ == '__main__':
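
For reference, a minimal sketch of the configuration that `create_package` consumes after this change, using only keys that the functions above actually read; every value below is illustrative and not taken from a generated config file.

# Illustrative only: key names mirror the accesses in create_mmdeploy,
# create_sdk and create_mmdeploy_runtime; values and paths are placeholders.
example_cfg = {
    'BUILD_MMDEPLOY': 'ON',
    'BUILD_SDK_NAME': 'mmdeploy-{mmdeploy_v}-linux-x86_64',
    'cmake_cfg': {
        'MMDEPLOY_BUILD_SDK': 'ON',
        'MMDEPLOY_BUILD_SDK_PYTHON_API': 'ON',
        'MMDEPLOY_TARGET_DEVICES': 'cpu',
        'MMDEPLOY_TARGET_BACKENDS': 'ort',
        'ONNXRUNTIME_DIR': '/opt/onnxruntime',
    },
}

# Dumping example_cfg to config.yml and running the builder with
# `--config config.yml --output-dir pack` is equivalent to calling
# create_package(example_cfg, osp.abspath('pack')).
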

@@ -1,3 +1,3 @@
-include mmdeploy_python/*.so
-include mmdeploy_python/*.dll
-include mmdeploy_python/*.pyd
+include mmdeploy_runtime/*.so*
+include mmdeploy_runtime/*.dll
+include mmdeploy_runtime/*.pyd

@@ -3,9 +3,14 @@
import ctypes
import glob
import os
import sys

from .version import __version__

if sys.platform == 'win32':
    os.environ['PATH'] = f'{os.path.dirname(__file__)};{os.environ["PATH"]}'
    from . import _win_dll_path  # noqa F401


def try_load(library):
    try:

@@ -18,6 +23,6 @@ CURDIR = os.path.realpath(os.path.dirname(__file__))
for lib in glob.iglob(os.path.join(CURDIR, '*.so*')):
    try_load(lib)

-from .mmdeploy_python import *  # noqa
+from .mmdeploy_runtime import *  # noqa

__all__ = ['__version__']
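
The body of `try_load` falls outside this hunk; as a rough sketch (an assumption, not the module's actual implementation), a preloader like this usually reduces to a guarded `ctypes.CDLL` call:

# Hypothetical sketch only -- the real try_load body is not shown in this
# hunk. The usual pattern is to preload each bundled shared library so the
# extension module's dependencies resolve, and to tolerate failures.
import ctypes


def try_load_sketch(library):
    try:
        ctypes.CDLL(library)
    except OSError:
        # an optional backend library may be missing; the star import of
        # mmdeploy_runtime above surfaces any truly fatal problem
        pass
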

@@ -0,0 +1 @@
# Copyright (c) OpenMMLab. All rights reserved.

@@ -1,6 +1,7 @@
import os
import os.path as osp
import platform
import sys

try:
    from setuptools import find_packages, setup

@@ -8,7 +9,8 @@ except ImportError:
    from distutils.core import find_packages, setup

CURDIR = os.path.realpath(os.path.dirname(__file__))
-version_file = osp.join(CURDIR, 'mmdeploy_python', 'version.py')
+version_file = osp.join(CURDIR, 'mmdeploy_runtime', 'version.py')
+package_name = 'mmdeploy_runtime'


def get_version():

@@ -21,9 +23,36 @@ def get_platform_name():
    return platform.machine()


def parse_arg_remove_boolean(argv, arg_name):
    arg_value = False
    if arg_name in sys.argv:
        arg_value = True
        argv.remove(arg_name)

    return arg_value


if parse_arg_remove_boolean(sys.argv, '--use-gpu'):
    package_name = package_name + '_gpu'
    if sys.platform == 'win32':
        with open('mmdeploy_runtime/_win_dll_path.py', 'a') as f:
            code = \
                'import os\n' \
                'import sys\n\n' \
                'cuda_bin_dir = ""\n' \
                'if "CUDA_PATH" in os.environ:\n' \
                '    cuda_bin_dir = os.path.join(os.environ["CUDA_PATH"], "bin")\n' \
                'else:\n' \
                '    raise ImportError("Can\'t find environment variable CUDA_PATH")\n' \
                'if sys.version_info >= (3, 8):\n' \
                '    os.add_dll_directory(cuda_bin_dir)\n' \
                'else:\n' \
                '    os.environ["PATH"] = cuda_bin_dir + os.pathsep + os.environ["PATH"]'
            f.write(code)

if __name__ == '__main__':
    setup(
-        name='mmdeploy_python',
+        name=package_name,
        version=get_version(),
        description='OpenMMLab Model Deployment SDK python api',
        author='OpenMMLab',

@@ -33,5 +62,5 @@ if __name__ == '__main__':
        packages=find_packages(),
        include_package_data=True,
        platforms=get_platform_name(),
-        package_data={'mmdeploy_python': ['*.so*', '*.pyd', '*.pdb']},
+        package_data={'mmdeploy_runtime': ['*.so*', '*.pyd', '*.pdb']},
        license='Apache License 2.0')

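
A standalone demo of the argv handling above (not part of setup.py): the `--use-gpu` switch is stripped from the argument list before setuptools parses it, and only flips the package name.

# Demo of the --use-gpu handling used by the setup script above; the argv
# list here is a stand-in for sys.argv.
def parse_arg_remove_boolean(argv, arg_name):
    arg_value = False
    if arg_name in argv:
        arg_value = True
        argv.remove(arg_name)
    return arg_value


argv = ['setup.py', 'bdist_wheel', '--use-gpu']
use_gpu = parse_arg_remove_boolean(argv, '--use-gpu')
print(use_gpu)  # True -> the wheel is named mmdeploy_runtime_gpu
print(argv)     # ['setup.py', 'bdist_wheel'] -> setuptools never sees the flag
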

@@ -0,0 +1,25 @@
# build sdk

1. Install OpenCV (you can skip this step if it is already installed).
   In the sdk folder:

   `./install_opencv.sh`

2. Set the environment variables and library paths.
   In the sdk folder:

   `source ./set_env.sh` \
   (**you additionally have to install CUDA and cuDNN if you use the CUDA version of the sdk**)

3. Build the sdk.
   In the sdk folder:

   `./build_sdk.sh` \
   (if you installed OpenCV with ./install_opencv.sh)

   or

   `./build_sdk.sh "path/to/folder/of/OpenCVConfig.cmake"` \
   (if you installed OpenCV yourself)

The executable will be generated in `bin/`.

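The three steps can also be scripted; a minimal sketch in Python, assuming the current working directory is the unpacked sdk folder described above. Sourcing set_env.sh and running the build have to happen in the same shell so the exported variables stay visible.

# Sketch: drive the documented Linux steps from Python. Assumes the scripts
# ship in the current directory, exactly as laid out in this sdk folder.
import subprocess

subprocess.run(['bash', './install_opencv.sh'], check=True)   # step 1
subprocess.run(                                               # steps 2 and 3
    ['bash', '-c', 'source ./set_env.sh && ./build_sdk.sh'],
    check=True)
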
@@ -0,0 +1,50 @@
#!/bin/bash

WORKSPACE=$(realpath $(dirname "$0"))
OPENCV_DIR=""

if [ -n "$1" ]; then
    OPENCV_DIR=$(cd "$1"; pwd)
    if [ $? -ne 0 ]; then
        echo "opencv path $1 doesn't exist"
        exit 1
    fi
    if [ ! -f "$OPENCV_DIR/OpenCVConfig.cmake" ]; then
        echo "opencv path $1 doesn't contain OpenCVConfig.cmake"
        exit 1
    fi
fi

if [ -z "$OPENCV_DIR" ]; then
    # search thirdparty
    OPENCV_DIR="${WORKSPACE}/thirdparty/opencv/install/lib64/cmake/opencv4"
    _OPENCV_DIR="${WORKSPACE}/thirdparty/opencv/install/lib/cmake/opencv4"
    if [ -d "$OPENCV_DIR" ]; then
        echo "Found OPENCV_DIR= $OPENCV_DIR"
    elif [ -d "$_OPENCV_DIR" ]; then
        OPENCV_DIR=$_OPENCV_DIR
        echo "Found OPENCV_DIR= $OPENCV_DIR"
    else
        echo "Can't find opencv, please provide OPENCV_DIR or install it by install_opencv.sh"
        exit 1
    fi
fi

MMDEPLOY_DIR="$WORKSPACE/lib/cmake/MMDeploy"

BUILD_DIR="${WORKSPACE}/example/cpp/build"
if [ -d "${BUILD_DIR}" ]; then
    rm -rf "${BUILD_DIR}"
fi

mkdir -p ${BUILD_DIR}
cd ${BUILD_DIR}


cmake .. -DMMDeploy_DIR="$MMDEPLOY_DIR" \
    -DOpenCV_DIR="${OPENCV_DIR}"

make -j $(nproc)

cd ${WORKSPACE}
ln -sf ${BUILD_DIR} bin

@@ -0,0 +1,29 @@
#!/bin/bash
set -e

opencvVer="4.5.5"

WORKSPACE=$(realpath $(dirname "$0"))
THIRDPARTY_DIR="${WORKSPACE}/thirdparty"

if [ ! -d $THIRDPARTY_DIR ]; then
    echo $THIRDPARTY_DIR
    mkdir -p $THIRDPARTY_DIR
fi

pushd ${THIRDPARTY_DIR}

url="https://github.com/opencv/opencv/archive/refs/tags/$opencvVer.tar.gz"
wget $url
tar xf $opencvVer.tar.gz
mv opencv-$opencvVer opencv

pushd opencv

mkdir build
pushd build
cmake .. -DBUILD_TESTS=OFF -DBUILD_EXAMPLES=OFF -DCMAKE_INSTALL_PREFIX=../install
make -j$(nproc)
make install

pushd -3

@@ -0,0 +1,29 @@
#!/bin/bash

if [ -n "$1" ]; then
    WORKSPACE=$1
else
    WORKSPACE=$(realpath $(dirname "${BASH_SOURCE[0]}"))
fi

THIRDPARTY_DIR=$WORKSPACE/thirdparty

pushd $THIRDPARTY_DIR

if [ -d onnxruntime ]; then
    export ONNXRUNTIME_DIR=$THIRDPARTY_DIR/onnxruntime
    export LD_LIBRARY_PATH=$ONNXRUNTIME_DIR/lib:$LD_LIBRARY_PATH
fi

if [ -d tensorrt ]; then
    export TENSORRT_DIR=$THIRDPARTY_DIR/tensorrt
    export LD_LIBRARY_PATH=$TENSORRT_DIR/lib:$LD_LIBRARY_PATH
fi

if [ -d openvino ]; then
    export InferenceEngine_DIR=$THIRDPARTY_DIR/openvino/runtime/cmake
    sopaths=$(find $(pwd)/openvino -name "*.so" -exec dirname {} \; | uniq | tr '\n' ':')
    export LD_LIBRARY_PATH=$sopaths$LD_LIBRARY_PATH
fi

popd

@@ -0,0 +1,29 @@
# build sdk

1. Open Windows PowerShell with administrator privileges and allow script execution:

   `Set-ExecutionPolicy RemoteSigned`

2. Install OpenCV (you can skip this step if it is already installed).
   In the sdk folder:

   `.\install_opencv.ps1`

3. Set the environment variables and path.
   In the sdk folder:

   `. .\set_env.ps1` \
   (you additionally have to install CUDA and cuDNN if you use the CUDA version of the sdk)

4. Build the sdk.
   In the sdk folder:

   `. .\build_sdk.ps1` \
   (if you installed OpenCV with install_opencv.ps1)

   or

   `. .\build_sdk.ps1 "path/to/folder/of/OpenCVConfig.cmake"` \
   (if you installed OpenCV yourself)

The executable will be generated in:
`example\cpp\build\Release`

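After the build finishes, the generated demo can be run against a converted model directory; a sketch with placeholder paths (the model directory and test image below are assumptions, not values from this package):

# Sketch: run the classifier demo produced by the Windows build above.
# MODEL_DIR must point to a model converted with mmdeploy; both paths are
# placeholders.
import subprocess

MODEL_DIR = r'D:\workspace\mmcls_model'
subprocess.run(
    [r'.\example\cpp\build\Release\classifier.exe',
     MODEL_DIR, MODEL_DIR + r'\demo.jpg'],
    check=True)
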
@@ -0,0 +1,54 @@
$ErrorActionPreference = 'Stop'

$WORKSPACE = $PSScriptRoot
$OPENCV_DIR = ""

if ($args.Count -gt 0) {
    $OPENCV_DIR = $args[0]
    if (-Not (Test-Path -Path $OPENCV_DIR -PathType Container)) {
        Write-Error "OPENCV_DIR $OPENCV_DIR doesn't exist"
        Exit 1
    }
    $OPENCV_CONFIG = [IO.Path]::Combine("$OPENCV_DIR", "OpenCVConfig.cmake")
    if (-Not (Test-Path -Path $OPENCV_CONFIG -PathType Leaf)) {
        Write-Error "OPENCV_DIR $OPENCV_DIR doesn't contain OpenCVConfig.cmake"
        Exit 1
    }
}

if ($OPENCV_DIR -eq "") {
    # search thirdparty
    $THIRDPARTY_DIR = "${WORKSPACE}/thirdparty"
    $THIRD_OPENCV = [IO.Path]::Combine("$THIRDPARTY_DIR", "opencv", "install")
    if (-Not (Test-Path $THIRD_OPENCV -PathType Container)) {
        Write-Error "Can't find opencv, please provide OPENCV_DIR or install it by install_opencv.ps1"
        Exit 1
    }
    $OPENCV_DIR = $THIRD_OPENCV
}

$MMDEPLOY_DIR = [IO.Path]::Combine("$WORKSPACE", "lib", "cmake", "MMDeploy")

$BUILD_DIR = "${WORKSPACE}/example/cpp/build"
if (Test-Path -Path $BUILD_DIR -PathType Container) {
    Remove-Item $BUILD_DIR -Recurse
}

New-Item -Path $BUILD_DIR -ItemType Directory
Push-Location $BUILD_DIR

Write-Host $MMDEPLOY_DIR

$MSVC_TOOLSET = "-T v142"
if ($env:CUDA_PATH -ne "") {
    $MSVC_TOOLSET = "$MSVC_TOOLSET,cuda=$env:CUDA_PATH"
    Write-Host $MSVC_TOOLSET
}

cmake .. -A x64 $MSVC_TOOLSET `
    -DMMDeploy_DIR="$MMDEPLOY_DIR" `
    -DOpenCV_DIR="$OPENCV_DIR"

cmake --build . --config Release

Pop-Location

@@ -0,0 +1,34 @@
$opencvVer = "4.5.5"
# ----

$ErrorActionPreference = 'Stop'
$WORKSPACE = $PSScriptRoot
$THIRDPARTY_DIR = "${WORKSPACE}/thirdparty"
$OPENCV_DIR = "${THIRDPARTY_DIR}/opencv/install"

if (-Not (Test-Path -Path $THIRDPARTY_DIR -PathType Container)) {
    New-Item -Path $THIRDPARTY_DIR -ItemType Directory
}

Push-Location "${THIRDPARTY_DIR}"

$url = "https://github.com/opencv/opencv/archive/refs/tags/$opencvVer.zip"
$fileName = [IO.Path]::GetFileName($url)
Start-BitsTransfer $url $fileName
Expand-Archive -Path $fileName -DestinationPath "." -Force
Move-Item "opencv-$opencvVer" "opencv"
Push-Location "opencv"
New-Item -Path "build" -ItemType Directory
Push-Location build

cmake .. -A x64 -T v142 `
    -DBUILD_TESTS=OFF `
    -DBUILD_PERF_TESTS=OFF `
    -DCMAKE_INSTALL_PREFIX="${OPENCV_DIR}"

cmake --build . --config Release -j6
cmake --install . --config Release

Pop-Location
Pop-Location
Pop-Location

@@ -0,0 +1,35 @@
$WORKSPACE = $PSScriptRoot
$THIRDPARTY_DIR = "${WORKSPACE}/thirdparty"
Push-Location $THIRDPARTY_DIR

if (Test-Path -Path "onnxruntime" -PathType Container) {
    $dir = [IO.Path]::Combine("$pwd", "onnxruntime")
    $env:ONNXRUNTIME_DIR = $dir
    $path = [IO.Path]::Combine("$dir", "lib")
    $env:PATH = "$path;$env:PATH"
}

if (Test-Path -Path "tensorrt" -PathType Container) {
    $dir = [IO.Path]::Combine("$pwd", "tensorrt")
    $env:TENSORRT_DIR = $dir
    $path = [IO.Path]::Combine("$dir", "lib")
    $env:PATH = "$path;$env:PATH"
}

if (Test-Path -Path "openvino" -PathType Container) {
    $root = [IO.Path]::Combine("$pwd", "openvino")
    $dir = [IO.Path]::Combine("$root", "runtime", "cmake")
    $env:InferenceEngine_DIR = $dir
    $paths = Get-ChildItem -Path $root -Filter "*.dll" -Recurse | `
        ForEach-Object { $_.Directory.FullName } | Get-Unique
    foreach ($path in $paths) {
        $env:PATH = "$path;$env:PATH"
        Write-Host $path
    }
}

$path = [IO.Path]::Combine("$WORKSPACE", "bin")
$env:PATH = "$path;$env:PATH"


Pop-Location

@@ -0,0 +1,48 @@
$ErrorActionPreference = 'Stop'

$WORKSPACE = ""
$MODEL_DIR = "D:\DEPS\citest\mmcls"
$SDK_DIR = "sdk"

if ($args.Count -gt 0) {
    $WORKSPACE = $args[0]
}

Push-Location $WORKSPACE
Push-Location $SDK_DIR

$pkgs = $(ls).Name
$test_pkg = $pkgs[0]
if ($pkgs.Count -gt 1) {
    foreach ($pkg in $pkgs) {
        if ($pkg -like '*cpu*') {
            $test_pkg = $pkg
            break
        }
    }
}

$work_dir = [IO.Path]::Combine("$env:TMP", [guid]::NewGuid().ToString())
Copy-Item $test_pkg $work_dir -Recurse
Push-Location $work_dir


# opencv
if (-Not (Test-Path $env:OpenCV_DIR)) {
    .\install_opencv.ps1
}

# env
. .\set_env.ps1

# build
.\build_sdk.ps1 $env:OpenCV_DIR

# run
.\example\cpp\build\Release\classifier.exe "D:\DEPS\citest\mmcls" "$MODEL_DIR\demo.jpg"

Pop-Location
Remove-Item $work_dir -Recurse

Pop-Location
Pop-Location

@@ -0,0 +1,35 @@
set -e

WORKSPACE="."
MODEL_DIR="/__w/mmdeploy/testmodel/mmcls"
SDK_DIR="sdk"

if [[ -n "$1" ]]; then
    WORKSPACE=$1
fi

pushd $WORKSPACE
pushd $SDK_DIR

test_pkg=$(ls | grep *cpu*)
work_dir=/tmp/_test
cp -r $test_pkg $work_dir

pushd $work_dir

# opencv
if [[ ! -d $OpenCV_DIR ]]; then
    ./install_opencv.sh
fi

# env
source ./set_env.sh $(pwd)

# build
./build_sdk.sh $OpenCV_DIR

# run
./bin/classifier $MODEL_DIR $MODEL_DIR/demo.jpg

popd
rm -rf $work_dir

@@ -0,0 +1,44 @@
$ErrorActionPreference = 'Stop'

$WORKSPACE = ""
$MODEL_DIR = "D:\DEPS\citest\mmcls"
$SDK_PYTHON_DIR = "mmdeploy_runtime"

if ($args.Count -gt 0) {
    $WORKSPACE = $args[0]
}

Push-Location $WORKSPACE
Push-Location $SDK_PYTHON_DIR

$pkgs = $(ls).Name
$test_pkg = ""
if ($pkgs.Count -gt 1) {
    foreach ($pkg in $pkgs) {
        if ($pkg -like 'mmdeploy_runtime-*cp38*') {
            $test_pkg = $pkg
            break
        }
    }
}

pip install $test_pkg --force-reinstall

$code = "
import cv2
from mmdeploy_runtime import Classifier
import sys
handle = Classifier('$MODEL_DIR', 'cpu', 0)
img = cv2.imread('$MODEL_DIR\demo.jpg')
try:
    res = handle(img)
    print(res)
except:
    print('error')
    sys.exit(1)
"

python -c $code

Pop-Location
Pop-Location

@@ -0,0 +1,34 @@
set -e

WORKSPACE="."
MODEL_DIR="/__w/mmdeploy/testmodel/mmcls"
SDK_PYTHON_DIR="mmdeploy_runtime"

if [[ -n "$1" ]]; then
    WORKSPACE=$1
fi

cd $WORKSPACE
cd $SDK_PYTHON_DIR

PY_VERSION=$(python3 -V | awk '{print $2}' | awk '{split($0, a, "."); print a[1]a[2]}')
test_pkg=$(ls | grep mmdeploy_runtime-*cp${PY_VERSION}*)

python3 -m pip install $test_pkg --force-reinstall
python3 -m pip install opencv-python

code="
import cv2
from mmdeploy_runtime import Classifier
import sys
handle = Classifier('$MODEL_DIR', 'cpu', 0)
img = cv2.imread('$MODEL_DIR/demo.jpg')
try:
    res = handle(img)
    print(res)
except:
    print('error')
    sys.exit(1)
"

python3 -c "$code"