Merge branch 'dev-1.x' of github.com:triple-Mu/mmdeploy into triplemu/dockerfile

 Conflicts:
	docker/CPU/Dockerfile
	docker/GPU/Dockerfile
pull/1296/head
triple-Mu 2022-11-03 16:49:57 +08:00
commit aef48958c6
2 changed files with 18 additions and 20 deletions

docker/CPU/Dockerfile

@@ -41,16 +41,17 @@ RUN curl -fsSL -v -o ~/miniconda.sh -O https://repo.anaconda.com/miniconda/Mini
     ~/miniconda.sh -b -p /opt/conda && \
     rm ~/miniconda.sh && \
     /opt/conda/bin/conda install -y python=${PYTHON_VERSION} conda-build pyyaml numpy ipython cython typing typing_extensions mkl mkl-include ninja && \
     /opt/conda/bin/conda clean -ya
-### pytorch
-RUN /opt/conda/bin/pip install torch==${TORCH_VERSION}+cpu torchvision==${TORCHVISION_VERSION}+cpu -f https://download.pytorch.org/whl/cpu/torch_stable.html
+### install open-mim
+RUN /opt/conda/bin/pip install --no-cache-dir openmim
+### pytorch mmcv onnxruntme and openvino
+RUN mim install torch==${TORCH_VERSION}+cpu torchvision==${TORCHVISION_VERSION}+cpu -f https://download.pytorch.org/whl/cpu/torch_stable.html \
+    && mim install --no-cache-dir "mmcv>=2.0.0rc1,<2.1.0" onnxruntime==${ONNXRUNTIME_VERSION} openvino-dev
 ENV PATH /opt/conda/bin:$PATH
-### install open-mim mmcv
-RUN /opt/conda/bin/pip install --no-cache-dir openmim \
-    && mim install --no-cache-dir "mmcv>=2.0.0rc2,<2.1.0"
 WORKDIR /root/workspace
 ### get onnxruntime
@@ -59,9 +60,6 @@ RUN wget https://github.com/microsoft/onnxruntime/releases/download/v${ONNXRUNTI
 ENV ONNXRUNTIME_DIR=/root/workspace/onnxruntime-linux-x64-${ONNXRUNTIME_VERSION}
-### install onnxruntme and openvino
-RUN /opt/conda/bin/pip install onnxruntime==${ONNXRUNTIME_VERSION} openvino-dev
 ### build ncnn
 RUN git clone https://github.com/Tencent/ncnn.git &&\
     cd ncnn &&\
@@ -72,7 +70,7 @@ RUN git clone https://github.com/Tencent/ncnn.git &&\
     make -j$(nproc) &&\
     make install &&\
     cd /root/workspace/ncnn/python &&\
-    pip install -e .
+    mim install -e .
 ENV PATH="/root/workspace/ncnn/build/tools/quantize/:${PATH}"

docker/GPU/Dockerfile

@@ -34,19 +34,19 @@ RUN curl -fsSL -v -o ~/miniconda.sh -O https://repo.anaconda.com/miniconda/Mini
     /opt/conda/bin/conda install -y python=${PYTHON_VERSION} conda-build pyyaml numpy ipython cython typing typing_extensions mkl mkl-include ninja && \
     /opt/conda/bin/conda clean -ya
-### pytorch
-RUN /opt/conda/bin/conda install pytorch==${TORCH_VERSION} torchvision==${TORCHVISION_VERSION} cudatoolkit=${CUDA} -c pytorch
-ENV PATH /opt/conda/bin:$PATH
-### install mmcv
-RUN /opt/conda/bin/pip install --no-cache-dir openmim \
-    && mim install --no-cache-dir "mmcv>=2.0.0rc2,<2.1.0"
+### install open-mim
+RUN /opt/conda/bin/pip install --no-cache-dir openmim
+### pytorch mmcv onnxruntme
+RUN mim install torch==${TORCH_VERSION}+cpu torchvision==${TORCHVISION_VERSION}+cpu -f https://download.pytorch.org/whl/cpu/torch_stable.html \
+    && mim install --no-cache-dir "mmcv>=2.0.0rc1,<2.1.0" onnxruntime-gpu==${ONNXRUNTIME_VERSION}
+ENV PATH /opt/conda/bin:$PATH
 WORKDIR /root/workspace
 ### get onnxruntime
 RUN wget https://github.com/microsoft/onnxruntime/releases/download/v${ONNXRUNTIME_VERSION}/onnxruntime-linux-x64-${ONNXRUNTIME_VERSION}.tgz \
-    && tar -zxvf onnxruntime-linux-x64-${ONNXRUNTIME_VERSION}.tgz &&\
-    pip install onnxruntime-gpu==${ONNXRUNTIME_VERSION}
+    && tar -zxvf onnxruntime-linux-x64-${ONNXRUNTIME_VERSION}.tgz
 ### cp trt from pip to conda
 RUN cp -r /usr/local/lib/python${PYTHON_VERSION}/dist-packages/tensorrt* /opt/conda/lib/python${PYTHON_VERSION}/site-packages/
@@ -64,7 +64,7 @@ RUN git clone https://github.com/open-mmlab/mmdeploy &&\
     cmake -DMMDEPLOY_TARGET_BACKENDS="ort;trt" .. &&\
     make -j$(nproc) &&\
     cd .. &&\
-    mim install --no-cache-dir -e .
+    mim install -e .
 ### build sdk
 RUN git clone https://github.com/openppl-public/ppl.cv.git &&\
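The GPU Dockerfile follows the same pattern: openmim first, then one mim call that installs torch (from the CPU wheel index, as written in the added lines), mmcv and onnxruntime-gpu, which is why the pip install of onnxruntime-gpu after the tar step goes away. A rough sketch of the resulting section, reconstructed from the added lines with blank lines and indentation assumed:

### install open-mim
RUN /opt/conda/bin/pip install --no-cache-dir openmim
### pytorch mmcv onnxruntme
RUN mim install torch==${TORCH_VERSION}+cpu torchvision==${TORCHVISION_VERSION}+cpu -f https://download.pytorch.org/whl/cpu/torch_stable.html \
    && mim install --no-cache-dir "mmcv>=2.0.0rc1,<2.1.0" onnxruntime-gpu==${ONNXRUNTIME_VERSION}
ENV PATH /opt/conda/bin:$PATH
WORKDIR /root/workspace
### get onnxruntime
RUN wget https://github.com/microsoft/onnxruntime/releases/download/v${ONNXRUNTIME_VERSION}/onnxruntime-linux-x64-${ONNXRUNTIME_VERSION}.tgz \
    && tar -zxvf onnxruntime-linux-x64-${ONNXRUNTIME_VERSION}.tgz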