Fix hyperlink and support arm7 (#670)

* Update hyperlink, test=document_fix
* Fix to support arm7

parent a6e2114e32
commit 97f4f557e5
@@ -9,20 +9,27 @@ THIRD_PARTY_DIR=${LITE_ROOT}/third_party
 OPENCV_VERSION=opencv4.1.0
 
-OPENCV_LIBS = ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/arm64-v8a/libs/libopencv_imgcodecs.a \
-              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/arm64-v8a/libs/libopencv_imgproc.a \
-              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/arm64-v8a/libs/libopencv_core.a \
-              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/arm64-v8a/3rdparty/libs/libtegra_hal.a \
-              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/arm64-v8a/3rdparty/libs/liblibjpeg-turbo.a \
-              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/arm64-v8a/3rdparty/libs/liblibwebp.a \
-              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/arm64-v8a/3rdparty/libs/liblibpng.a \
-              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/arm64-v8a/3rdparty/libs/liblibjasper.a \
-              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/arm64-v8a/3rdparty/libs/liblibtiff.a \
-              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/arm64-v8a/3rdparty/libs/libIlmImf.a \
-              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/arm64-v8a/3rdparty/libs/libtbb.a \
-              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/arm64-v8a/3rdparty/libs/libcpufeatures.a
-
-OPENCV_INCLUDE = -I../../../third_party/${OPENCV_VERSION}/arm64-v8a/include
+ifeq (${ARM_ABI}, arm8)
+    ARM_PATH=arm64-v8a
+endif
+ifeq (${ARM_ABI}, arm7)
+    ARM_PATH=armeabi-v7a
+endif
+
+OPENCV_LIBS = ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/${ARM_PATH}/libs/libopencv_imgcodecs.a \
+              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/${ARM_PATH}/libs/libopencv_imgproc.a \
+              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/${ARM_PATH}/libs/libopencv_core.a \
+              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/${ARM_PATH}/3rdparty/libs/libtegra_hal.a \
+              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/${ARM_PATH}/3rdparty/libs/liblibjpeg-turbo.a \
+              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/${ARM_PATH}/3rdparty/libs/liblibwebp.a \
+              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/${ARM_PATH}/3rdparty/libs/liblibpng.a \
+              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/${ARM_PATH}/3rdparty/libs/liblibjasper.a \
+              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/${ARM_PATH}/3rdparty/libs/liblibtiff.a \
+              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/${ARM_PATH}/3rdparty/libs/libIlmImf.a \
+              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/${ARM_PATH}/3rdparty/libs/libtbb.a \
+              ${THIRD_PARTY_DIR}/${OPENCV_VERSION}/${ARM_PATH}/3rdparty/libs/libcpufeatures.a
+
+OPENCV_INCLUDE = -I../../../third_party/${OPENCV_VERSION}/${ARM_PATH}/include
 
 CXX_INCLUDES = $(INCLUDES) ${OPENCV_INCLUDE} -I$(LITE_ROOT)/cxx/include
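The new `ifeq` switch makes the OpenCV library and include paths follow the `ARM_ABI` variable instead of being hard-coded to `arm64-v8a`. A minimal sketch of how the two builds might be invoked (the rest of the demo environment, e.g. an exported `NDK_ROOT`, is assumed to be set up as before):

```shell
# 32-bit build: ARM_PATH resolves to armeabi-v7a
make ARM_ABI=arm7

# 64-bit build: ARM_PATH resolves to arm64-v8a
make ARM_ABI=arm8
```

Note that neither branch fires when `ARM_ABI` is unset or misspelled, leaving `ARM_PATH` empty and the OpenCV paths dangling, so the variable has to be passed explicitly.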
@@ -25,8 +25,8 @@ Paddle Lite is PaddlePaddle's lightweight inference engine, which provides efficient inference for mobile and IoT devices
 1. [Recommended] Download directly; the inference library download links are as follows:
 |Platform|Inference Library Download Link|
 |-|-|
-|Android|[arm7](https://paddlelite-data.bj.bcebos.com/Release/2.6.1/Android/inference_lite_lib.android.armv7.gcc.c++_static.with_extra.CV_ON.tar.gz) / [arm8](https://paddlelite-data.bj.bcebos.com/Release/2.6.1/Android/inference_lite_lib.android.armv8.gcc.c++_static.with_extra.CV_ON.tar.gz)|
-|iOS|[arm7](https://paddlelite-data.bj.bcebos.com/Release/2.6.1/iOS/inference_lite_lib.ios.armv7.with_extra.CV_ON.tar.gz) / [arm8](https://paddlelite-data.bj.bcebos.com/Release/2.6.1/iOS/inference_lite_lib.ios64.armv8.with_extra.CV_ON.tar.gz)|
+|Android|[arm7](https://paddlelite-data.bj.bcebos.com/Release/2.8-rc/Android/gcc/inference_lite_lib.android.armv7.gcc.c++_static.with_extra.with_cv.tar.gz) / [arm8](https://paddlelite-data.bj.bcebos.com/Release/2.8-rc/Android/gcc/inference_lite_lib.android.armv8.gcc.c++_static.with_extra.with_cv.tar.gz)|
+|iOS|[arm7](https://paddlelite-data.bj.bcebos.com/Release/2.8-rc/iOS/inference_lite_lib.ios.armv7.with_cv.with_extra.tiny_publish.tar.gz) / [arm8](https://paddlelite-data.bj.bcebos.com/Release/2.8-rc/iOS/inference_lite_lib.ios.armv8.with_cv.with_extra.tiny_publish.tar.gz)|
 
 **Note**:
 1. If the inference library was downloaded from the Paddle-Lite [official documentation](https://paddle-lite.readthedocs.io/zh/latest/quick_start/release_lib.html#android-toolchain-gcc),
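For reference, a sketch of fetching and unpacking one of the updated 2.8-rc packages; the URL is copied verbatim from the Android arm8 row above, and the archive is assumed to unpack into a directory named after itself:

```shell
# download and unpack the 2.8-rc Android arm8 inference library
wget https://paddlelite-data.bj.bcebos.com/Release/2.8-rc/Android/gcc/inference_lite_lib.android.armv8.gcc.c++_static.with_extra.with_cv.tar.gz
tar -xzf inference_lite_lib.android.armv8.gcc.c++_static.with_extra.with_cv.tar.gz
```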
@@ -83,9 +83,10 @@ Paddle-Lite provides a variety of strategies to automatically optimize the original model, including
 #### 2.1.1 [Recommended] Install paddlelite via pip and convert the model
 
 Install `paddlelite` via Python pip; currently `Python3.7` is the highest version supported.
+**Note**: the version of the `paddlelite` wheel package must match the version of the inference library.
 
 ```shell
-pip install paddlelite
+pip install paddlelite==2.8
 ```
 
 Afterwards, the `paddle_lite_opt` tool can be used to convert the inference model. Some parameters of `paddle_lite_opt` are as follows:
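Because the wheel must now match the 2.8 inference library, a quick check after installing the pinned wheel can save a debugging round-trip; `pip show` is a standard pip command, and the expected output line is an assumption based on the pinned version:

```shell
# confirm the installed wheel version matches the inference library
pip show paddlelite | grep Version    # expected: Version: 2.8
```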
@@ -132,17 +133,13 @@ cd build.opt/lite/api/
 ```shell
 # enter the PaddleClas root directory
 cd PaddleClas_root_path
-export PYTHONPATH=$PWD
 
-# download and unpack the pre-trained model
-wget https://paddle-imagenet-models-name.bj.bcebos.com/MobileNetV3_large_x1_0_pretrained.tar
-tar -xf MobileNetV3_large_x1_0_pretrained.tar
-
-# export the pre-trained model as an inference model
-python tools/export_model.py -m MobileNetV3_large_x1_0 -p ./MobileNetV3_large_x1_0_pretrained/ -o ./MobileNetV3_large_x1_0_inference/
+# download and unpack the inference model
+wget https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/MobileNetV3_large_x1_0_infer.tar
+tar -xf MobileNetV3_large_x1_0_infer.tar
 
 # convert the inference model into a Paddle-Lite optimized model
-paddle_lite_opt --model_file=./MobileNetV3_large_x1_0_inference/model --param_file=./MobileNetV3_large_x1_0_inference/params --optimize_out=./MobileNetV3_large_x1_0
+paddle_lite_opt --model_file=./MobileNetV3_large_x1_0_infer/inference.pdmodel --param_file=./MobileNetV3_large_x1_0_infer/inference.pdiparams --optimize_out=./MobileNetV3_large_x1_0
 ```
 
 Finally, a `MobileNetV3_large_x1_0.nb` file is generated in the current directory.
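The conversion above relies on `paddle_lite_opt` defaults for the target hardware and output format. A sketch with those flags spelled out; `--valid_targets=arm` and `--optimize_out_type=naive_buffer` match the documented defaults for this mobile deployment, but verify them against your Paddle-Lite version:

```shell
# same conversion with the target and output format made explicit
paddle_lite_opt \
    --model_file=./MobileNetV3_large_x1_0_infer/inference.pdmodel \
    --param_file=./MobileNetV3_large_x1_0_infer/inference.pdiparams \
    --valid_targets=arm \
    --optimize_out_type=naive_buffer \
    --optimize_out=./MobileNetV3_large_x1_0
```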
@@ -26,8 +26,8 @@ For the detailed compilation directions of different development environments, p
 
 |Platform|Inference Library Download Link|
 |-|-|
-|Android|[arm7](https://paddlelite-data.bj.bcebos.com/Release/2.6.1/Android/inference_lite_lib.android.armv7.gcc.c++_static.with_extra.CV_ON.tar.gz) / [arm8](https://paddlelite-data.bj.bcebos.com/Release/2.6.1/Android/inference_lite_lib.android.armv8.gcc.c++_static.with_extra.CV_ON.tar.gz)|
-|iOS|[arm7](https://paddlelite-data.bj.bcebos.com/Release/2.6.1/iOS/inference_lite_lib.ios.armv7.with_extra.CV_ON.tar.gz) / [arm8](https://paddlelite-data.bj.bcebos.com/Release/2.6.1/iOS/inference_lite_lib.ios64.armv8.with_extra.CV_ON.tar.gz)|
+|Android|[arm7](https://paddlelite-data.bj.bcebos.com/Release/2.8-rc/Android/gcc/inference_lite_lib.android.armv7.gcc.c++_static.with_extra.with_cv.tar.gz) / [arm8](https://paddlelite-data.bj.bcebos.com/Release/2.8-rc/Android/gcc/inference_lite_lib.android.armv8.gcc.c++_static.with_extra.with_cv.tar.gz)|
+|iOS|[arm7](https://paddlelite-data.bj.bcebos.com/Release/2.8-rc/iOS/inference_lite_lib.ios.armv7.with_cv.with_extra.tiny_publish.tar.gz) / [arm8](https://paddlelite-data.bj.bcebos.com/Release/2.8-rc/iOS/inference_lite_lib.ios.armv8.with_cv.with_extra.tiny_publish.tar.gz)|
 
 **NOTE**:
 
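To inspect one of the new 2.8-rc archives before committing to it, listing its contents is enough; standard tar usage, with the file name taken from the Android arm7 link above:

```shell
# list the contents of the downloaded arm7 package without extracting it
tar -tzf inference_lite_lib.android.armv7.gcc.c++_static.with_extra.with_cv.tar.gz | head
```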
@@ -78,8 +78,9 @@ Paddle-Lite provides a variety of strategies to automatically optimize the origi
 * Use pip to install Paddle-Lite. The following command uses `pip3.7`.
 
 ```shell
-pip install paddlelite
+pip install paddlelite==2.8
 ```
+**Note**: The version of the `paddlelite` wheel must match that of the inference lib.
 
 * Use `paddle_lite_opt` to optimize the inference model; the parameters of `paddle_lite_opt` are as follows:
 
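If an older wheel is already present, reinstalling keeps the wheel and the 2.8 library in step; these are plain pip commands:

```shell
# replace any previously installed wheel with the matching 2.8 release
pip uninstall -y paddlelite
pip install paddlelite==2.8
```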
@@ -121,17 +122,14 @@ Taking the `MobileNetV3_large_x1_0` model of PaddleClas as an example, we will i
 ```shell
 # enter PaddleClas root directory
 cd PaddleClas_root_path
 export PYTHONPATH=$PWD
 
-# download and uncompress the pre-trained model
-wget https://paddle-imagenet-models-name.bj.bcebos.com/MobileNetV3_large_x1_0_pretrained.tar
-tar -xf MobileNetV3_large_x1_0_pretrained.tar
+# download and uncompress the inference model
+wget https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/MobileNetV3_large_x1_0_infer.tar
+tar -xf MobileNetV3_large_x1_0_infer.tar
 
-# export the pre-trained model as an inference model
-python tools/export_model.py -m MobileNetV3_large_x1_0 -p ./MobileNetV3_large_x1_0_pretrained/ -o ./MobileNetV3_large_x1_0_inference/
-
 # convert inference model to Paddle-Lite optimized model
-paddle_lite_opt --model_file=./MobileNetV3_large_x1_0_inference/model --param_file=./MobileNetV3_large_x1_0_inference/params --optimize_out=./MobileNetV3_large_x1_0
+paddle_lite_opt --model_file=./MobileNetV3_large_x1_0_infer/inference.pdmodel --param_file=./MobileNetV3_large_x1_0_infer/inference.pdiparams --optimize_out=./MobileNetV3_large_x1_0
 ```
 
 When the above command completes, `MobileNetV3_large_x1_0.nb` will appear in the current directory, which is the converted model file.
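A quick way to confirm the conversion succeeded; the file name follows from the `--optimize_out` argument above:

```shell
# the optimized model should now exist in the current directory
ls -lh MobileNetV3_large_x1_0.nb
```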