improvement(installation): add script install mmdeploy (#919)
* feat(tools): add build ubuntu x64 ncnn
* ci(tools): add ncnn auto install
* fix(ci): auto install ncnn
* fix(tools): no interactive
* docs(build): add script build
* CI(ncnn): script install ncnn
* docs(zh_cn): fix error os
* fix
* CI(tools/script): test ort install passed
* update
* CI(tools): support pplnn
* CI(build): add pplnn
* docs(tools): update
* fix
* CI(tools): script install torchscript
* docs(build): add torchscript
* fix(tools): clean code and doc
* update
* fix(CI): requirements install failed
* debug CI
* update
* update
* update
* feat(tools/script): support user specify make jobs
* fix(tools/script): fix build pplnn with cuda
* fix(tools/script): torchscript add tips and simplify install mmcv
* fix(tools/script): check nvcc version first
* fix(tools/scripts): pplnn checkout
* fix(CI): add simple check install succcess
* fix
* debug CI
* fix
* fix(CI): pplnn install mis wheel
* fix(CI): build error
* fix(CI): remove misleading message
parent b1e7579bdd
commit 4534598056
@@ -22,11 +22,6 @@ jobs:
    strategy:
      matrix:
        python-version: [3.7]
        torch: [1.9.0]
        include:
          - torch: 1.9.0
            torch_version: torch1.9
            torchvision: 0.10.0
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

@@ -65,3 +60,23 @@ jobs:
          echo $(pwd)
          ln -s build/bin/mmdeploy_onnx2ncnn ./
          python3 .github/scripts/test_onnx2ncnn.py --run 1
  script_install:
    runs-on: ubuntu-20.04
    strategy:
      matrix:
        python-version: [3.7]
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install mmdeploy
        run: |
          python3 tools/scripts/build_ubuntu_x64_ncnn.py
          python3 -m pip install torch==1.8.2 torchvision==0.9.2 --extra-index-url https://download.pytorch.org/whl/lts/1.8/cpu
          python3 -m pip install mmcv-full==1.5.1 -f https://download.openmmlab.com/mmcv/dist/cpu/torch1.8.0/index.html
          python3 -c 'import mmdeploy.apis.ncnn as ncnn_api; assert ncnn_api.is_available() and ncnn_api.is_custom_ops_available()'

@@ -0,0 +1,39 @@
name: backend-ort

on:
  push:
    paths-ignore:
      - "demo/**"
      - "tools/**"

  pull_request:
    paths-ignore:
      - "demo/**"
      - "tools/**"
      - "docs/**"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  script_install:
    runs-on: ubuntu-20.04
    strategy:
      matrix:
        python-version: [3.7]
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install mmdeploy
        run: |
          python3 tools/scripts/build_ubuntu_x64_ort.py
          python3 -m pip install torch==1.8.2 torchvision==0.9.2 --extra-index-url https://download.pytorch.org/whl/lts/1.8/cpu
          python3 -m pip install mmcv-full==1.5.1 -f https://download.openmmlab.com/mmcv/dist/cpu/torch1.8.0/index.html
          python3 -c 'import mmdeploy.apis.onnxruntime as ort_api; assert ort_api.is_available() and ort_api.is_custom_ops_available()'

@@ -0,0 +1,39 @@
name: backend-pplnn

on:
  push:
    paths-ignore:
      - "demo/**"
      - "tools/**"

  pull_request:
    paths-ignore:
      - "demo/**"
      - "tools/**"
      - "docs/**"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  script_install:
    runs-on: ubuntu-18.04
    strategy:
      matrix:
        python-version: [3.7]
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install mmdeploy
        run: |
          python3 tools/scripts/build_ubuntu_x64_pplnn.py
          python3 -m pip install torch==1.8.2 torchvision==0.9.2 --extra-index-url https://download.pytorch.org/whl/lts/1.8/cpu
          python3 -m pip install mmcv-full==1.5.1 -f https://download.openmmlab.com/mmcv/dist/cpu/torch1.8.0/index.html
          python3 -c 'import mmdeploy.apis.pplnn as pplnn_api; assert pplnn_api.is_available()'

@@ -0,0 +1,36 @@
name: backend-ort

on:
  push:
    paths-ignore:
      - "demo/**"
      - "tools/**"

  pull_request:
    paths-ignore:
      - "demo/**"
      - "tools/**"
      - "docs/**"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  script_install:
    runs-on: ubuntu-18.04
    strategy:
      matrix:
        python-version: [3.7]
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install mmdeploy
        run: |
          python3 tools/scripts/build_ubuntu_x64_torchscript.py

@@ -69,6 +69,7 @@ Please read [getting_started](docs/en/get_started.md) for the basic usage of MMD

- [Build](docs/en/01-how-to-build/build_from_source.md)
  - [Build from Docker](docs/en/01-how-to-build/build_from_docker.md)
  - [Build from Script](docs/en/01-how-to-build/build_from_script.md)
  - [Build for Linux](docs/en/01-how-to-build/linux-x86_64.md)
  - [Build for Win10](docs/en/01-how-to-build/windows.md)
  - [Build for Android](docs/en/01-how-to-build/android.md)

@@ -67,6 +67,7 @@ MMDeploy 是 [OpenMMLab](https://openmmlab.com/) 模型部署工具箱,**为

- [Get Started](docs/zh_cn/get_started.md)
- [Build](docs/zh_cn/01-how-to-build/build_from_source.md)
  - [One-Click Script Installation](docs/zh_cn/01-how-to-build/build_from_script.md)
  - [Build from Docker](docs/zh_cn/01-how-to-build/build_from_docker.md)
  - [Build for Linux](docs/zh_cn/01-how-to-build/linux-x86_64.md)
  - [Build for Win10](docs/zh_cn/01-how-to-build/windows.md)

@@ -0,0 +1,31 @@
# Build from Script

User surveys show that most users are already familiar with python and torch before trying mmdeploy, so we provide scripts to simplify mmdeploy installation.

Assuming you already have a Python environment ready (whether `conda` or `pyenv`), run this script to install mmdeploy together with the ncnn backend; the `nproc` argument is optional.

```bash
$ cd /path/to/mmdeploy
$ python3 tools/scripts/build_ubuntu_x64_ncnn.py $(nproc)
..
```

A sudo password may be required along the way. The script does its best to build and install the mmdeploy SDK and demos:

- Detect the host OS version and the `make` job count, check whether it runs as `root`, and try to fix `python3 -m pip`
- Find the necessary basic tools, such as g++-7, cmake, wget, etc.
- Compile the necessary dependencies, such as pyncnn and protobuf

The script also tries to avoid affecting the host environment:

- Dependencies built from source are placed in the `mmdeploy-dep` directory at the same level as mmdeploy
- It does not modify variables such as PATH, LD_LIBRARY_PATH, PYTHONPATH, etc.

The verified installation scripts are listed below. If you want mmdeploy to support multiple backends at the same time, run each script once:

| script                          | OS version  |
| :-----------------------------: | :---------: |
| build_ubuntu_x64_ncnn.py        | 18.04/20.04 |
| build_ubuntu_x64_ort.py         | 18.04/20.04 |
| build_ubuntu_x64_pplnn.py       | 18.04/20.04 |
| build_ubuntu_x64_torchscript.py | 18.04/20.04 |

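After any of these scripts finishes, a quick way to confirm the result is to import the corresponding backend API, mirroring the checks the CI jobs above run. A minimal sketch, assuming the ncnn script was used (swap the module for the backend you actually installed):

```python
# Post-install sanity check; for other backends import
# mmdeploy.apis.onnxruntime or mmdeploy.apis.pplnn instead.
import mmdeploy.apis.ncnn as ncnn_api

assert ncnn_api.is_available(), 'ncnn backend is not available'
assert ncnn_api.is_custom_ops_available(), 'mmdeploy ncnn custom ops were not built'
print('mmdeploy with ncnn looks good')
```
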
@@ -1,4 +1,4 @@
# Build From Source
# Build from Source

## Download

@@ -353,3 +353,21 @@ You can also activate other engines after the model.

  make -j$(nproc) && make install
  ```

- pplnn

  ```Bash
  cd ${MMDEPLOY_DIR}
  mkdir -p build && cd build
  cmake .. \
      -DCMAKE_CXX_COMPILER=g++-7 \
      -DMMDEPLOY_BUILD_SDK=ON \
      -DMMDEPLOY_BUILD_EXAMPLES=ON \
      -DMMDEPLOY_BUILD_SDK_PYTHON_API=ON \
      -DMMDEPLOY_TARGET_DEVICES="cuda;cpu" \
      -DMMDEPLOY_TARGET_BACKENDS=pplnn \
      -Dpplcv_DIR=${PPLCV_DIR}/cuda-build/cuda-build/install/lib/cmake/ppl \
      -Dpplnn_DIR=${PPLNN_DIR}/pplnn-build/install/lib/cmake/ppl

  make -j$(nproc) && make install
  ```

@@ -15,6 +15,7 @@ You can switch between Chinese and English documents in the lower-left corner of

   01-how-to-build/build_from_source.md
   01-how-to-build/build_from_docker.md
   01-how-to-build/build_from_script.md
   01-how-to-build/cmake_option.md

.. toctree::

@@ -0,0 +1,31 @@
# One-Click Script Installation

User surveys show that most users are already familiar with python and torch before learning about mmdeploy, so we provide scripts to simplify mmdeploy installation.

Assuming you already have Python 3.6+ with pip (whether conda or pyenv), run this script to install mmdeploy together with the ncnn backend; the `nproc` argument is optional.

```bash
$ cd /path/to/mmdeploy
$ python3 tools/scripts/build_ubuntu_x64_ncnn.py $(nproc)
..
```

A sudo password may be required along the way. The script does its best to build the mmdeploy SDK and demos:

- Detect the OS version, the `make` job count and whether it runs as root, and automatically fix pip problems
- Find the necessary basic tools, such as g++-7, cmake, wget, etc.
- Compile the necessary dependencies, such as pyncnn and protobuf

The script also tries to avoid affecting the host environment:

- Dependencies built from source are placed in the `mmdeploy-dep` directory at the same level as mmdeploy
- It does not modify variables such as PATH, LD_LIBRARY_PATH, PYTHONPATH, etc.

The verified installation scripts are listed below. If you want mmdeploy to support multiple backends at the same time, run each script once:

| script                          | OS version  |
| :-----------------------------: | :---------: |
| build_ubuntu_x64_ncnn.py        | 18.04/20.04 |
| build_ubuntu_x64_ort.py         | 18.04/20.04 |
| build_ubuntu_x64_pplnn.py       | 18.04/20.04 |
| build_ubuntu_x64_torchscript.py | 18.04/20.04 |

@@ -1,6 +1,6 @@
# Build from Source
# Manual Build from Source

If your environment allows it (good network and a capable host), we recommend the [docker approach](build_from_docker.md).
If your network is good, we recommend using [docker](build_from_docker.md) or the [one-click script](build_from_script.md).

## Download

@@ -348,3 +348,21 @@ pip install -e .

  make -j$(nproc) && make install
  ```

- pplnn

  ```Bash
  cd ${MMDEPLOY_DIR}
  mkdir -p build && cd build
  cmake .. \
      -DCMAKE_CXX_COMPILER=g++-7 \
      -DMMDEPLOY_BUILD_SDK=ON \
      -DMMDEPLOY_BUILD_EXAMPLES=ON \
      -DMMDEPLOY_BUILD_SDK_PYTHON_API=ON \
      -DMMDEPLOY_TARGET_DEVICES="cuda;cpu" \
      -DMMDEPLOY_TARGET_BACKENDS=pplnn \
      -Dpplcv_DIR=${PPLCV_DIR}/cuda-build/cuda-build/install/lib/cmake/ppl \
      -Dpplnn_DIR=${PPLNN_DIR}/pplnn-build/install/lib/cmake/ppl

  make -j$(nproc) && make install
  ```

@@ -15,6 +15,7 @@

   01-how-to-build/build_from_source.md
   01-how-to-build/build_from_docker.md
   01-how-to-build/build_from_script.md
   01-how-to-build/cmake_option.md

.. toctree::

@@ -0,0 +1,163 @@
# Copyright (c) OpenMMLab. All rights reserved.
import os
import sys
import time

from ubuntu_utils import cmd_result, ensure_base_env, get_job

g_jobs = 2


def install_protobuf(dep_dir) -> int:
    """Build and install protobuf.

    Args:
        dep_dir (str): Directory that holds the source-built dependencies.

    Returns:
        int: 0 on success.
    """
    print('-' * 10 + 'install protobuf' + '-' * 10)

    os.chdir(dep_dir)
    if not os.path.exists('protobuf-3.20.0'):
        os.system(
            'wget https://github.com/protocolbuffers/protobuf/releases/download/v3.20.0/protobuf-cpp-3.20.0.tar.gz'  # noqa: E501
        )
        os.system('tar xvf protobuf-cpp-3.20.0.tar.gz')

    os.chdir(os.path.join(dep_dir, 'protobuf-3.20.0'))

    install_dir = os.path.join(dep_dir, 'pbinstall')
    os.system('./configure --prefix={}'.format(install_dir))
    os.system('make -j {} && make install'.format(g_jobs))
    protoc = os.path.join(dep_dir, 'pbinstall', 'bin', 'protoc')

    print('protoc \t:{}'.format(cmd_result('{} --version'.format(protoc))))
    return 0


def install_pyncnn(dep_dir):
    print('-' * 10 + 'build and install pyncnn' + '-' * 10)
    time.sleep(2)

    # generate unzip and build dir
    os.chdir(dep_dir)

    # git clone
    if not os.path.exists('ncnn'):
        os.system(
            'git clone --depth 1 --branch 20220729 https://github.com/tencent/ncnn && cd ncnn'  # noqa: E501
        )

    ncnn_dir = os.path.join(dep_dir, 'ncnn')
    os.chdir(ncnn_dir)

    # update submodule pybind11, glslang not required
    os.system('git submodule init && git submodule update python/pybind11')
    # build
    if not os.path.exists('build'):
        os.system('mkdir build')

    os.chdir(os.path.join(ncnn_dir, 'build'))
    pb_install = os.path.join(dep_dir, 'pbinstall')
    pb_bin = os.path.join(pb_install, 'bin', 'protoc')
    pb_lib = os.path.join(pb_install, 'lib', 'libprotobuf.so')
    pb_include = os.path.join(pb_install, 'include')

    cmd = 'cmake .. '
    cmd += ' -DNCNN_PYTHON=ON '
    cmd += ' -DProtobuf_LIBRARIES={} '.format(pb_lib)
    cmd += ' -DProtobuf_PROTOC_EXECUTABLE={} '.format(pb_bin)
    cmd += ' -DProtobuf_INCLUDE_DIR={} '.format(pb_include)
    cmd += ' && make -j {} '.format(g_jobs)
    cmd += ' && make install '
    os.system(cmd)

    # install
    os.chdir(ncnn_dir)
    os.system('cd python && python -m pip install -e .')
    ncnn_cmake_dir = os.path.join(ncnn_dir, 'build', 'install', 'lib', 'cmake',
                                  'ncnn')
    assert (os.path.exists(ncnn_cmake_dir))
    print('ncnn cmake dir \t:{}'.format(ncnn_cmake_dir))
    print('\n')
    return ncnn_cmake_dir


def install_mmdeploy(work_dir, dep_dir, ncnn_cmake_dir):
    print('-' * 10 + 'build and install mmdeploy' + '-' * 10)
    time.sleep(3)

    os.chdir(work_dir)
    if not os.path.exists('build'):
        os.system('mkdir build')

    pb_install = os.path.join(dep_dir, 'pbinstall')
    pb_bin = os.path.join(pb_install, 'bin', 'protoc')
    pb_lib = os.path.join(pb_install, 'lib', 'libprotobuf.so')
    pb_include = os.path.join(pb_install, 'include')

    cmd = 'cd build && cmake ..'
    cmd += ' -DCMAKE_C_COMPILER=gcc-7 '
    cmd += ' -DCMAKE_CXX_COMPILER=g++-7 '
    cmd += ' -DMMDEPLOY_BUILD_SDK=ON '
    cmd += ' -DMMDEPLOY_BUILD_EXAMPLES=ON '
    cmd += ' -DMMDEPLOY_BUILD_SDK_PYTHON_API=ON '
    cmd += ' -DMMDEPLOY_TARGET_DEVICES=cpu '
    cmd += ' -DMMDEPLOY_TARGET_BACKENDS=ncnn '
    cmd += ' -DProtobuf_PROTOC_EXECUTABLE={} '.format(pb_bin)
    cmd += ' -DProtobuf_LIBRARIES={} '.format(pb_lib)
    cmd += ' -DProtobuf_INCLUDE_DIR={} '.format(pb_include)
    cmd += ' -Dncnn_DIR={} '.format(ncnn_cmake_dir)
    os.system(cmd)

    os.system('cd build && make -j {} && make install'.format(g_jobs))
    os.system('python3 -m pip install -v -e .')
    return 0


def main():
    """Auto install mmdeploy with ncnn. To verify this script:

    1) use `sudo docker run -v /path/to/mmdeploy:/root/mmdeploy -v /path/to/Miniconda3-latest-Linux-x86_64.sh:/root/miniconda.sh -it ubuntu:18.04 /bin/bash`  # noqa: E501
    2) install conda and setup python environment
    3) run `python3 tools/scripts/build_ubuntu_x64_ncnn.py`

    Returns:
        int: -1 on failure.
    """
    global g_jobs
    g_jobs = get_job(sys.argv)
    print('g_jobs {}'.format(g_jobs))

    work_dir = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
    dep_dir = os.path.abspath(os.path.join(work_dir, '..', 'mmdeploy-dep'))
    if not os.path.exists(dep_dir):
        if os.path.isfile(dep_dir):
            print('{} already exists and it is a file, exit.'.format(dep_dir))
            return -1
        os.mkdir(dep_dir)

    success, envs = ensure_base_env(work_dir, dep_dir)
    if success != 0:
        return -1

    if install_protobuf(dep_dir) != 0:
        return -1

    ncnn_cmake_dir = install_pyncnn(dep_dir)

    if install_mmdeploy(work_dir, dep_dir, ncnn_cmake_dir) != 0:
        return -1

    if len(envs) > 0:
        print(
            'We recommend that you set the following environment variables:\n')
        for env in envs:
            print(env)
        print('\n')


if __name__ == '__main__':
    main()

@@ -0,0 +1,97 @@
# Copyright (c) OpenMMLab. All rights reserved.
import os
import sys
import time

from ubuntu_utils import ensure_base_env, get_job

g_jobs = 2


def install_ort(dep_dir):
    print('-' * 10 + 'install ort' + '-' * 10)
    time.sleep(2)

    # generate unzip and build dir
    os.chdir(dep_dir)

    # install python onnxruntime
    os.system('python3 -m pip install onnxruntime==1.8.1')
    # download and unpack the prebuilt onnxruntime package
    if not os.path.exists('onnxruntime-linux-x64-1.8.1'):
        os.system(
            'wget https://github.com/microsoft/onnxruntime/releases/download/v1.8.1/onnxruntime-linux-x64-1.8.1.tgz'  # noqa: E501
        )
        os.system('tar xvf onnxruntime-linux-x64-1.8.1.tgz')

    ort_dir = os.path.join(dep_dir, 'onnxruntime-linux-x64-1.8.1')
    print('onnxruntime dir \t:{}'.format(ort_dir))
    print('\n')
    return ort_dir


def install_mmdeploy(work_dir, ort_dir):
    print('-' * 10 + 'build and install mmdeploy' + '-' * 10)
    time.sleep(3)

    os.chdir(work_dir)
    if not os.path.exists('build'):
        os.system('mkdir build')

    cmd = 'cd build && cmake ..'
    cmd += ' -DCMAKE_C_COMPILER=gcc-7 '
    cmd += ' -DCMAKE_CXX_COMPILER=g++-7 '
    cmd += ' -DMMDEPLOY_BUILD_SDK=ON '
    cmd += ' -DMMDEPLOY_BUILD_EXAMPLES=ON '
    cmd += ' -DMMDEPLOY_BUILD_SDK_PYTHON_API=ON '
    cmd += ' -DMMDEPLOY_TARGET_DEVICES=cpu '
    cmd += ' -DMMDEPLOY_TARGET_BACKENDS=ort '
    cmd += ' -DONNXRUNTIME_DIR={} '.format(ort_dir)
    os.system(cmd)

    os.system('cd build && make -j {} && make install'.format(g_jobs))
    os.system('python3 -m pip install -e .')
    return 0


def main():
    """Auto install mmdeploy with ort. To verify this script:

    1) use `sudo docker run -v /path/to/mmdeploy:/root/mmdeploy -v /path/to/Miniconda3-latest-Linux-x86_64.sh:/root/miniconda.sh -it ubuntu:18.04 /bin/bash`  # noqa: E501
    2) install conda and setup python environment
    3) run `python3 tools/scripts/build_ubuntu_x64_ort.py`

    Returns:
        int: -1 on failure.
    """
    global g_jobs
    g_jobs = get_job(sys.argv)
    print('g_jobs {}'.format(g_jobs))

    work_dir = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
    dep_dir = os.path.abspath(os.path.join(work_dir, '..', 'mmdeploy-dep'))
    if not os.path.exists(dep_dir):
        if os.path.isfile(dep_dir):
            print('{} already exists and it is a file, exit.'.format(dep_dir))
            return -1
        os.mkdir(dep_dir)

    success, envs = ensure_base_env(work_dir, dep_dir)
    if success != 0:
        return -1

    ort_dir = install_ort(dep_dir)

    if install_mmdeploy(work_dir, ort_dir) != 0:
        return -1

    if len(envs) > 0:
        print(
            'We recommend that you set the following environment variables:\n')
        for env in envs:
            print(env)
        print('\n')


if __name__ == '__main__':
    main()

@@ -0,0 +1,166 @@
# Copyright (c) OpenMMLab. All rights reserved.
import os
import sys
import time

from ubuntu_utils import cmd_result, ensure_base_env, get_job

g_jobs = 2


def install_pplcv(dep_dir, build_cuda):
    print('-' * 10 + 'install pplcv' + '-' * 10)
    time.sleep(2)

    os.chdir(dep_dir)

    pplcv_dir = os.path.join(dep_dir, 'ppl.cv')

    # git clone
    if not os.path.exists(pplcv_dir):
        os.system(
            'git clone --depth 1 --branch v0.7.0 https://github.com/openppl-public/ppl.cv/'  # noqa: E501
        )

    # build
    os.chdir(pplcv_dir)
    if build_cuda is True:
        os.system('./build.sh cuda')
        pplcv_cmake_dir = os.path.join(pplcv_dir,
                                       'cuda-build/install/lib/cmake/ppl')
    else:
        os.system('./build.sh x86_64')
        pplcv_cmake_dir = os.path.join(pplcv_dir,
                                       'x86-64-build/install/lib/cmake/ppl')

    print('\n')
    return pplcv_cmake_dir


def install_pplnn(dep_dir, build_cuda):
    print('-' * 10 + 'install pplnn' + '-' * 10)
    time.sleep(2)

    # generate unzip and build dir
    os.chdir(dep_dir)

    pplnn_dir = os.path.join(dep_dir, 'ppl.nn')

    # git clone
    if not os.path.exists(pplnn_dir):
        os.system(
            'git clone --depth 1 --branch v0.8.2 https://github.com/openppl-public/ppl.nn/'  # noqa: E501
        )

    # build
    os.chdir(pplnn_dir)
    if build_cuda is True:
        os.system(
            './build.sh -DPPLNN_USE_CUDA=ON -DPPLNN_USE_X86_64=ON -DPPLNN_ENABLE_PYTHON_API=ON'  # noqa: E501
        )
    else:
        os.system(
            './build.sh -DPPLNN_USE_X86_64=ON -DPPLNN_ENABLE_PYTHON_API=ON'  # noqa: E501
        )

    os.system('cd python/package && ./build.sh')
    os.system(
        'cd /tmp/pyppl-package/dist && python3 -m pip install pyppl*.whl --force-reinstall'  # noqa: E501
    )

    pplnn_cmake_dir = os.path.join(pplnn_dir,
                                   'pplnn-build/install/lib/cmake/ppl')
    print('\n')
    return pplnn_cmake_dir


def install_mmdeploy(work_dir, pplnn_cmake_dir, pplcv_cmake_dir, build_cuda):
    print('-' * 10 + 'build and install mmdeploy' + '-' * 10)
    time.sleep(3)

    os.chdir(work_dir)
    if not os.path.exists('build'):
        os.system('mkdir build')

    cmd = 'cd build && cmake ..'
    cmd += ' -DCMAKE_C_COMPILER=gcc-7 '
    cmd += ' -DCMAKE_CXX_COMPILER=g++-7 '
    cmd += ' -DMMDEPLOY_BUILD_SDK=ON '
    cmd += ' -DMMDEPLOY_BUILD_EXAMPLES=ON '
    cmd += ' -DMMDEPLOY_BUILD_SDK_PYTHON_API=ON '
    cmd += ' -DMMDEPLOY_TARGET_BACKENDS=pplnn '

    if build_cuda is True:
        cmd += ' -DMMDEPLOY_TARGET_DEVICES="cuda;cpu" '
    else:
        cmd += ' -DMMDEPLOY_TARGET_DEVICES=cpu '

    cmd += ' -Dpplcv_DIR={} '.format(pplcv_cmake_dir)
    cmd += ' -Dpplnn_DIR={} '.format(pplnn_cmake_dir)
    os.system(cmd)

    os.system('cd build && make -j {} && make install'.format(g_jobs))
    os.system('python3 -m pip install -e .')
    return 0


def main():
    """Auto install mmdeploy with pplnn. To verify this script:

    1) use `sudo docker run -v /path/to/mmdeploy:/root/mmdeploy -v /path/to/Miniconda3-latest-Linux-x86_64.sh:/root/miniconda.sh -it ubuntu:18.04 /bin/bash`  # noqa: E501
    2) install conda and setup python environment
    3) run `python3 tools/scripts/build_ubuntu_x64_pplnn.py`

    Returns:
        int: -1 on failure.
    """
    global g_jobs
    g_jobs = get_job(sys.argv)
    print('g_jobs {}'.format(g_jobs))

    work_dir = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
    dep_dir = os.path.abspath(os.path.join(work_dir, '..', 'mmdeploy-dep'))
    if not os.path.exists(dep_dir):
        if os.path.isfile(dep_dir):
            print('{} already exists and it is a file, exit.'.format(dep_dir))
            return -1
        os.mkdir(dep_dir)

    success, envs = ensure_base_env(work_dir, dep_dir)
    if success != 0:
        return -1

    # make sure gcc and g++ are available (point them at gcc-7/g++-7)
    gplus = cmd_result('which g++')
    if gplus is None or len(gplus) < 1:
        sudo = 'sudo'
        if 'root' in cmd_result('whoami'):
            sudo = ''
        os.system(
            '{} update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-7 200'  # noqa: E501
            .format(sudo))
        os.system(
            '{} update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-7 200'  # noqa: E501
            .format(sudo))

    # install pplcv and pplnn; build with CUDA only if nvcc is present
    nvcc = cmd_result('which nvcc')
    build_cuda = False
    if nvcc is not None and len(nvcc) > 1:
        build_cuda = True
    pplcv_cmake_dir = install_pplcv(dep_dir, build_cuda)
    pplnn_cmake_dir = install_pplnn(dep_dir, build_cuda)
    if install_mmdeploy(work_dir, pplnn_cmake_dir, pplcv_cmake_dir,
                        build_cuda) != 0:
        return -1

    if len(envs) > 0:
        print(
            'We recommend that you set the following environment variables:\n')
        for env in envs:
            print(env)
        print('\n')


if __name__ == '__main__':
    main()

@@ -0,0 +1,126 @@
# Copyright (c) OpenMMLab. All rights reserved.
import os
import sys
import time

from ubuntu_utils import cmd_result, cu_version_name, ensure_base_env, get_job

g_jobs = 2


def install_libtorch(dep_dir):
    print('-' * 10 + 'install libtorch' + '-' * 10)
    time.sleep(2)

    os.chdir(dep_dir)
    unzipped_name = 'libtorch'
    if os.path.exists(unzipped_name):
        return os.path.join(dep_dir, unzipped_name)

    torch_version = None
    try:
        import torch
        torch_version = torch.__version__
    except Exception:
        pass

    if torch_version is None:
        print('torch version is None, use 1.11.0')
        torch_version = '1.11.0'

    version_name = None

    # first check `nvcc` version, if failed, use `nvidia-smi`
    cuda = cmd_result(
        " nvcc --version | grep release | awk '{print $5}' | awk -F , '{print $1}' "  # noqa: E501
    )
    if cuda is None or len(cuda) < 1:
        cuda = cmd_result(" nvidia-smi | grep CUDA | awk '{print $9}' ")

    if cuda is not None and len(cuda) > 0:
        version_name = cu_version_name(cuda)
    else:
        version_name = 'cpu'

    filename = 'libtorch-shared-with-deps-{}%2B{}.zip'.format(
        torch_version, version_name)
    url = 'https://download.pytorch.org/libtorch/{}/{}'.format(
        version_name, filename)
    os.system('wget {} -O libtorch.zip'.format(url))
    os.system('unzip libtorch.zip')
    if not os.path.exists(unzipped_name):
        print(
            'download or unzip libtorch from {} failed, please check https://pytorch.org/get-started/locally/'  # noqa: E501
            .format(url))
        return None
    return os.path.join(dep_dir, unzipped_name)


def install_mmdeploy(work_dir, libtorch_dir):
    print('-' * 10 + 'build and install mmdeploy' + '-' * 10)
    time.sleep(3)

    os.chdir(work_dir)
    if not os.path.exists('build'):
        os.system('mkdir build')

    cmd = 'cd build && Torch_DIR={} cmake ..'.format(libtorch_dir)
    cmd += ' -DCMAKE_C_COMPILER=gcc-7 '
    cmd += ' -DCMAKE_CXX_COMPILER=g++-7 '
    cmd += ' -DMMDEPLOY_BUILD_SDK=ON '
    cmd += ' -DMMDEPLOY_BUILD_EXAMPLES=ON '
    cmd += ' -DMMDEPLOY_BUILD_SDK_PYTHON_API=ON '
    cmd += ' -DMMDEPLOY_TARGET_DEVICES=cpu '
    cmd += ' -DMMDEPLOY_TARGET_BACKENDS=torchscript '
    cmd += ' -DTORCHSCRIPT_DIR={} '.format(libtorch_dir)
    os.system(cmd)

    os.system('cd build && make -j {} && make install'.format(g_jobs))
    os.system('python3 -m pip install -e .')
    return 0


def main():
    """Auto install mmdeploy with torchscript. To verify this script:

    1) use `sudo docker run -v /path/to/mmdeploy:/root/mmdeploy -v /path/to/Miniconda3-latest-Linux-x86_64.sh:/root/miniconda.sh -it ubuntu:18.04 /bin/bash`  # noqa: E501
    2) install conda and setup python environment
    3) run `python3 tools/scripts/build_ubuntu_x64_torchscript.py`

    Returns:
        int: -1 on failure.
    """
    global g_jobs
    g_jobs = get_job(sys.argv)
    print('g_jobs {}'.format(g_jobs))

    work_dir = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
    dep_dir = os.path.abspath(os.path.join(work_dir, '..', 'mmdeploy-dep'))
    if not os.path.exists(dep_dir):
        if os.path.isfile(dep_dir):
            print('{} already exists and it is a file, exit.'.format(dep_dir))
            return -1
        os.mkdir(dep_dir)

    success, envs = ensure_base_env(work_dir, dep_dir)
    if success != 0:
        return -1

    libtorch_dir = install_libtorch(dep_dir)

    if libtorch_dir is None:
        return -1

    if install_mmdeploy(work_dir, libtorch_dir) != 0:
        return -1

    if len(envs) > 0:
        print(
            'We recommend that you set the following environment variables:\n')
        for env in envs:
            print(env)
        print('\n')


if __name__ == '__main__':
    main()

@@ -0,0 +1,227 @@
# Copyright (c) OpenMMLab. All rights reserved.
import os
import time


def cmd_result(txt: str):
    cmd = os.popen(txt)
    return cmd.read().rstrip().lstrip()


def get_job(argv) -> int:
    # get nprocs, if user not specified, use max(2, nproc-1)
    job = 2
    if len(argv) <= 1:
        print('you can use `python3 {} N` to set make -j [N]'.format(argv[0]))
        nproc = cmd_result('nproc')
        if nproc is not None and len(nproc) > 0:
            job = max(int(nproc) - 1, 2)
        else:
            job = 2
    else:
        job = int(argv[1])
    return job


def version_major(txt: str) -> int:
    return int(txt.split('.')[0])


def version_minor(txt: str) -> int:
    return int(txt.split('.')[1])


def cu_version_name(version: str) -> str:
    versions = version.split('.')
    return 'cu' + versions[0] + versions[1]


def simple_check_install(bin: str, sudo: str) -> str:
    result = cmd_result('which {}'.format(bin))
    if result is None or len(result) < 1:
        print('{} not found, try install {} ..'.format(bin, bin), end='')
        os.system('{} apt install {} -y'.format(sudo, bin))
        result = cmd_result('which {}'.format(bin))
        if result is None or len(result) < 1:
            print('Check {} failed.'.format(bin))
            return None
        print('success')
    return result


def ensure_base_env(work_dir, dep_dir):
    description = """
    check python, root, pytorch version, auto install these binary:

    * make
    * g++-7
    * git
    * wget
    * unzip
    * opencv
    * mmcv (not compulsory)
    """

    envs = []
    print('-' * 10 + 'ensure base env' + '-' * 10)
    print(description)

    os.system('python3 -m ensurepip')
    os.system('python3 -m pip install wheel')

    sudo = 'sudo'
    if 'root' in cmd_result('whoami'):
        sudo = ''

    # check ubuntu
    ubuntu = cmd_result(
        """ lsb_release -a 2>/dev/null | grep "Release" | tail -n 1 | awk '{print $NF}' """  # noqa: E501
    )

    # check cmake version
    cmake = cmd_result('which cmake')
    if cmake is None or len(cmake) < 1:
        print('cmake not found, try install cmake ..', end='')
        os.system('python3 -m pip install cmake>=3.14.0')

        cmake = cmd_result('which cmake')
        if cmake is None or len(cmake) < 1:
            env = 'export PATH=${PATH}:~/.local/bin'
            os.system(env)
            envs.append(env)

            cmake = cmd_result('which cmake')
            if cmake is None or len(cmake) < 1:
                print('Check cmake failed.')
                return -1, envs
        print('success')

    # check make
    make = cmd_result('which make')
    if make is None or len(make) < 1:
        print('make not found, try install make ..', end='')
        os.system('{} apt update --fix-missing'.format(sudo))

        os.system(
            '{} DEBIAN_FRONTEND="noninteractive" apt install make'.format(
                sudo))
        make = cmd_result('which make')
        if make is None or len(make) < 1:
            print('Check make failed.')
            return -1, envs
        print('success')

    # check g++ version
    gplus = cmd_result('which g++-7')
    if gplus is None or len(gplus) < 1:
        # install g++
        print('g++-7 not found, try install g++-7 ..', end='')
        os.system(
            '{} DEBIAN_FRONTEND="noninteractive" apt install software-properties-common -y'  # noqa: E501
            .format(sudo))
        os.system('{} apt update'.format(sudo))
        if ubuntu is None or len(ubuntu) < 1 or version_major(ubuntu) <= 18:
            os.system(
                '{} add-apt-repository ppa:ubuntu-toolchain-r/test -y'.format(
                    sudo))
        os.system('{} apt install gcc-7 g++-7 -y'.format(sudo))

        gplus = cmd_result('which g++-7')
        if gplus is None or len(gplus) < 1:
            print('Check g++-7 failed.')
            return -1, envs
        os.system(
            '{} update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-7 200'  # noqa: E501
            .format(sudo))
        os.system(
            '{} update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-7 200'  # noqa: E501
            .format(sudo))
        print('success')

    # wget
    wget = simple_check_install('wget', sudo)

    # check torch and mmcv, we try to install mmcv, it is not compulsory
    mmcv_version = None
    torch_version = None
    try:
        import torch
        torch_version = torch.__version__

        try:
            import mmcv
            mmcv_version = mmcv.__version__
        except Exception:
            # install mmcv
            print('mmcv not found, try install mmcv ..', end='')
            os.system('python3 -m pip install -U openmim')
            os.system('mim install mmcv-full==1.5.1')
    except Exception:
        pass

    # git
    git = simple_check_install('git', sudo)

    # unzip
    unzip = simple_check_install('unzip', sudo)

    # opencv
    ocv = cmd_result('which opencv_version')
    if ocv is None or len(ocv) < 1:
        print('ocv not found, try install opencv ..', end='')
        os.system(
            '{} add-apt-repository ppa:ignaciovizzo/opencv3-nonfree -y'.format(
                sudo))
        os.system('{} apt update'.format(sudo))
        os.system(
            '{} DEBIAN_FRONTEND="noninteractive" apt install libopencv-dev -y'
            .format(sudo))

        ocv = cmd_result('which opencv_version')
        if ocv is None or len(ocv) < 1:
            print('Check ocv failed.')
            return -1, envs
        print('success')

    # print all
    print('ubuntu \t\t:{}'.format(ubuntu))

    # check python
    print('python bin\t:{}'.format(cmd_result('which python3')))
    print('python version\t:{}'.format(
        cmd_result("python3 --version | awk '{print $2}'")))

    print('cmake bin\t:{}'.format(cmake))
    print('cmake version\t:{}'.format(
        cmd_result("cmake --version | head -n 1 | awk '{print $3}'")))

    print('make bin\t:{}'.format(make))
    print('make version\t:{}'.format(
        cmd_result(" make --version | head -n 1 | awk '{print $3}' ")))

    print('wget bin\t:{}'.format(wget))
    print('g++-7 bin\t:{}'.format(gplus))

    print('mmcv version\t:{}'.format(mmcv_version))
    if mmcv_version is None:
        print('\t please install an mm series algorithm later.')
        time.sleep(2)

    print('torch version\t:{}'.format(torch_version))
    if torch_version is None:
        print('\t please install pytorch later.')
        time.sleep(2)

    print('ocv version\t:{}'.format(cmd_result('opencv_version')))

    print('git bin\t\t:{}'.format(git))
    print('git version\t:{}'.format(
        cmd_result("git --version | awk '{print $3}' ")))
    print('unzip bin\t:{}'.format(unzip))
    # work dir
    print('work dir \t:{}'.format(work_dir))
    # dep dir
    print('dep dir \t:{}'.format(dep_dir))

    print('\n')
    return 0, envs
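For reference, a minimal sketch of how a new backend script is expected to reuse these helpers, mirroring the pattern of the build_ubuntu_x64_*.py scripts above (`install_my_backend` is a hypothetical placeholder, not part of this PR):

```python
# Hypothetical skeleton of a tools/scripts/build_ubuntu_x64_<backend>.py,
# built on the helpers defined in ubuntu_utils.py above.
import os
import sys

from ubuntu_utils import ensure_base_env, get_job


def main():
    jobs = get_job(sys.argv)  # make -j value: argv[1] if given, else max(2, nproc - 1)

    work_dir = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
    dep_dir = os.path.abspath(os.path.join(work_dir, '..', 'mmdeploy-dep'))
    os.makedirs(dep_dir, exist_ok=True)

    # installs cmake, make, g++-7, wget, git, unzip, opencv and (optionally) mmcv
    ret, envs = ensure_base_env(work_dir, dep_dir)
    if ret != 0:
        return -1

    # ... build the backend and mmdeploy here, passing `jobs` to make -j ...

    for env in envs:  # environment variables the user is advised to export
        print(env)


if __name__ == '__main__':
    main()
```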