add kl_quant chain and polish prepare.sh
parent 2c64a6469e
commit 725c0da267
@@ -1,5 +1,5 @@
 ===========================cpp_infer_params===========================
-model_name:MobileNetV3_large_x1_0_kl
+model_name:MobileNetV3_large_x1_0_kl_quant
 cpp_infer_type:cls
 cls_inference_model_dir:./MobileNetV3_large_x1_0_kl_quant_infer/
 det_inference_model_dir:
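
For the C++ inference chain above, `cls_inference_model_dir` must already contain the exported kl_quant model before the test runs. A minimal sketch of preparing that directory by hand, assuming the same slim_model archive referenced by the serving configs below (normally `prepare.sh` performs this step):

```bash
# Manual stand-in for the prepare.sh download step (sketch, not the harness itself).
# The extracted directory name matches cls_inference_model_dir above.
wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/slim_model/MobileNetV3_large_x1_0_kl_quant_infer.tar
tar -xf MobileNetV3_large_x1_0_kl_quant_infer.tar
```
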
@@ -0,0 +1,14 @@
+===========================serving_params===========================
+model_name:MobileNetV3_large_x1_0_kl_quant
+python:python3.7
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/slim_model/MobileNetV3_large_x1_0_kl_quant_infer.tar
+trans_model:-m paddle_serving_client.convert
+--dirname:./deploy/paddleserving/MobileNetV3_large_x1_0_kl_quant_infer/
+--model_filename:inference.pdmodel
+--params_filename:inference.pdiparams
+--serving_server:./deploy/paddleserving/MobileNetV3_large_x1_0_kl_quant_serving/
+--serving_client:./deploy/paddleserving/MobileNetV3_large_x1_0_kl_quant_client/
+serving_dir:./deploy/paddleserving
+web_service:null
+--use_gpu:0|null
+pipline:test_cpp_serving_client.py
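
This new C++ serving config chains model download, conversion, and a serving client test. As a rough illustration of what the `trans_model` fields describe, a hedged sketch of the conversion command, run from the repository root after the archive from `inference_model_url` has been extracted into `deploy/paddleserving/`:

```bash
# Sketch assembled from the key:value fields above; the test harness builds the
# actual command itself.
python3.7 -m paddle_serving_client.convert \
    --dirname ./deploy/paddleserving/MobileNetV3_large_x1_0_kl_quant_infer/ \
    --model_filename inference.pdmodel \
    --params_filename inference.pdiparams \
    --serving_server ./deploy/paddleserving/MobileNetV3_large_x1_0_kl_quant_serving/ \
    --serving_client ./deploy/paddleserving/MobileNetV3_large_x1_0_kl_quant_client/
```

With `web_service:null`, the chain is expected to exercise the C++ serving path via the `pipline` script, `test_cpp_serving_client.py`.
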
@@ -0,0 +1,14 @@
+===========================serving_params===========================
+model_name:MobileNetV3_large_x1_0_kl_quant
+python:python3.7
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/MobileNetV3_large_x1_0_infer.tar
+trans_model:-m paddle_serving_client.convert
+--dirname:./deploy/paddleserving/MobileNetV3_large_x1_0_kl_quant_infer/
+--model_filename:inference.pdmodel
+--params_filename:inference.pdiparams
+--serving_server:./deploy/paddleserving/MobileNetV3_large_x1_0_kl_quant_serving/
+--serving_client:./deploy/paddleserving/MobileNetV3_large_x1_0_kl_quant_client/
+serving_dir:./deploy/paddleserving
+web_service:classification_web_service.py
+--use_gpu:0|null
+pipline:pipeline_http_client.py
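
The Python serving variant of the same model launches a pipeline web service and queries it over HTTP. A hedged sketch of the flow implied by `serving_dir`, `web_service`, and `pipline` (the exact arguments the harness passes may differ):

```bash
# Run from the repository root; paths and script names are taken from the config above.
cd ./deploy/paddleserving
python3.7 classification_web_service.py &   # start the pipeline web service (web_service)
python3.7 pipeline_http_client.py           # send a test request (pipline)
```

The `--use_gpu:0|null` field lets the harness run the same chain twice, once on GPU (presumably device 0) and once on CPU when the flag is dropped (`null`).
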
@@ -0,0 +1,14 @@
+===========================serving_params===========================
+model_name:ResNet50_vd_kl_quant
+python:python3.7
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/slim_model/ResNet50_vd_kl_quant_infer.tar
+trans_model:-m paddle_serving_client.convert
+--dirname:./deploy/paddleserving/ResNet50_vd_kl_quant_infer/
+--model_filename:inference.pdmodel
+--params_filename:inference.pdiparams
+--serving_server:./deploy/paddleserving/ResNet50_vd_kl_quant_serving/
+--serving_client:./deploy/paddleserving/ResNet50_vd_kl_quant_client/
+serving_dir:./deploy/paddleserving
+web_service:null
+--use_gpu:0|null
+pipline:test_cpp_serving_client.py
@@ -0,0 +1,14 @@
+===========================serving_params===========================
+model_name:ResNet50_vd_kl_quant
+python:python3.7
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/slim_model/ResNet50_vd_kl_quant_infer.tar
+trans_model:-m paddle_serving_client.convert
+--dirname:./deploy/paddleserving/ResNet50_vd_kl_quant_infer/
+--model_filename:inference.pdmodel
+--params_filename:inference.pdiparams
+--serving_server:./deploy/paddleserving/ResNet50_vd_kl_quant_serving/
+--serving_client:./deploy/paddleserving/ResNet50_vd_kl_quant_client/
+serving_dir:./deploy/paddleserving
+web_service:classification_web_service.py
+--use_gpu:0|null
+pipline:pipeline_http_client.py
@@ -6,25 +6,27 @@ The main program for the Linux GPU/CPU C++ inference test is `test_inference_cpp.sh`, which can
 
 - Inference:
 
-| Algorithm | Model | device_CPU | device_GPU |
-| :----: | :----: | :----: | :----: |
-| MobileNetV3 | MobileNetV3_large_x1_0 | Supported | Supported |
-| PP-ShiTu | PPShiTu_general_rec、PPShiTu_mainbody_det | Supported | Supported |
-| PP-ShiTu | PPShiTu_mainbody_det | Supported | Supported |
-| PPHGNet | PPHGNet_small | Supported | Supported |
-| PPHGNet | PPHGNet_tiny | Supported | Supported |
-| PPLCNet | PPLCNet_x0_25 | Supported | Supported |
-| PPLCNet | PPLCNet_x0_35 | Supported | Supported |
-| PPLCNet | PPLCNet_x0_5 | Supported | Supported |
-| PPLCNet | PPLCNet_x0_75 | Supported | Supported |
-| PPLCNet | PPLCNet_x1_0 | Supported | Supported |
-| PPLCNet | PPLCNet_x1_5 | Supported | Supported |
-| PPLCNet | PPLCNet_x2_0 | Supported | Supported |
-| PPLCNet | PPLCNet_x2_5 | Supported | Supported |
-| PPLCNetV2 | PPLCNetV2_base | Supported | Supported |
-| ResNet | ResNet50 | Supported | Supported |
-| ResNet | ResNet50_vd | Supported | Supported |
-| SwinTransformer | SwinTransformer_tiny_patch4_window7_224 | Supported | Supported |
+| Algorithm | Model | device_CPU | device_GPU |
+| :-------------: | :---------------------------------------: | :--------: | :--------: |
+| MobileNetV3 | MobileNetV3_large_x1_0 | Supported | Supported |
+| MobileNetV3 | MobileNetV3_large_x1_0-KL | Supported | Supported |
+| PP-ShiTu | PPShiTu_general_rec、PPShiTu_mainbody_det | Supported | Supported |
+| PP-ShiTu | PPShiTu_mainbody_det | Supported | Supported |
+| PPHGNet | PPHGNet_small | Supported | Supported |
+| PPHGNet | PPHGNet_tiny | Supported | Supported |
+| PPLCNet | PPLCNet_x0_25 | Supported | Supported |
+| PPLCNet | PPLCNet_x0_35 | Supported | Supported |
+| PPLCNet | PPLCNet_x0_5 | Supported | Supported |
+| PPLCNet | PPLCNet_x0_75 | Supported | Supported |
+| PPLCNet | PPLCNet_x1_0 | Supported | Supported |
+| PPLCNet | PPLCNet_x1_5 | Supported | Supported |
+| PPLCNet | PPLCNet_x2_0 | Supported | Supported |
+| PPLCNet | PPLCNet_x2_5 | Supported | Supported |
+| PPLCNetV2 | PPLCNetV2_base | Supported | Supported |
+| ResNet | ResNet50 | Supported | Supported |
+| ResNet | ResNet50_vd | Supported | Supported |
+| ResNet | ResNet50_vd-KL | Supported | Supported |
+| SwinTransformer | SwinTransformer_tiny_patch4_window7_224 | Supported | Supported |
 
 ## 2. Test procedure (using **ResNet50** as an example)
 
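
For reference, the C++ inference chain is normally driven through `prepare.sh` and `test_inference_cpp.sh`. A hedged sketch of the invocation; the `test_tipc/` layout and the placeholder config name are assumptions, while the `cpp_infer` mode string comes from the prepare.sh hunks below:

```bash
# <config.txt> stands for a per-model C++ inference config ending in
# infer_cpp_linux_gpu_cpu.txt, e.g. the kl_quant one added above.
bash test_tipc/prepare.sh <config.txt> cpp_infer
bash test_tipc/test_inference_cpp.sh <config.txt>
```
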
@@ -10,6 +10,7 @@ The main program for the Linux GPU/CPU C++ serving deployment test is `test_serving_infer_cpp.sh`
 | Algorithm | Model | device_CPU | device_GPU |
 | :-------------: | :---------------------------------------: | :--------: | :--------: |
 | MobileNetV3 | MobileNetV3_large_x1_0 | Supported | Supported |
+| MobileNetV3 | MobileNetV3_large_x1_0-KL | Supported | Supported |
 | PP-ShiTu | PPShiTu_general_rec、PPShiTu_mainbody_det | Supported | Supported |
 | PPHGNet | PPHGNet_small | Supported | Supported |
 | PPHGNet | PPHGNet_tiny | Supported | Supported |
@@ -24,6 +25,7 @@ The main program for the Linux GPU/CPU C++ serving deployment test is `test_serving_infer_cpp.sh`
 | PPLCNetV2 | PPLCNetV2_base | Supported | Supported |
 | ResNet | ResNet50 | Supported | Supported |
 | ResNet | ResNet50_vd | Supported | Supported |
+| ResNet | ResNet50_vd-KL | Supported | Supported |
 | SwinTransformer | SwinTransformer_tiny_patch4_window7_224 | Supported | Supported |
 
 
@@ -10,6 +10,7 @@ The main program for the Linux GPU/CPU PYTHON serving deployment test is `test_serving_infer_pyt
 | Algorithm | Model | device_CPU | device_GPU |
 | :-------------: | :---------------------------------------: | :--------: | :--------: |
 | MobileNetV3 | MobileNetV3_large_x1_0 | Supported | Supported |
+| MobileNetV3 | MobileNetV3_large_x1_0-KL | Supported | Supported |
 | PP-ShiTu | PPShiTu_general_rec、PPShiTu_mainbody_det | Supported | Supported |
 | PPHGNet | PPHGNet_small | Supported | Supported |
 | PPHGNet | PPHGNet_tiny | Supported | Supported |
@@ -24,6 +25,7 @@ The main program for the Linux GPU/CPU PYTHON serving deployment test is `test_serving_infer_pyt
 | PPLCNetV2 | PPLCNetV2_base | Supported | Supported |
 | ResNet | ResNet50 | Supported | Supported |
 | ResNet | ResNet50_vd | Supported | Supported |
+| ResNet | ResNet50_vd-KL | Supported | Supported |
 | SwinTransformer | SwinTransformer_tiny_patch4_window7_224 | Supported | Supported |
 
 
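
As with the inference chain, the serving chains are driven by their own entry scripts. A hedged sketch; the `test_tipc/` layout, the placeholder config name, the `serving_infer` mode string, and the full name of the Python entry script (inferred from the truncated heading above) are assumptions, not taken from this diff:

```bash
# <config.txt> stands for one of the serving_params configs added above
# (C++ or Python variant); prepare.sh downloads inference_model_url first.
bash test_tipc/prepare.sh <config.txt> serving_infer
bash test_tipc/test_serving_infer_cpp.sh <config.txt>      # C++ serving chain
bash test_tipc/test_serving_infer_python.sh <config.txt>   # Python pipeline chain
```
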
@@ -50,8 +50,6 @@ if [[ ${MODE} = "cpp_infer" ]]; then
 echo "################### build opencv ###################"
 rm -rf ./deploy/cpp/opencv-3.4.7.tar.gz ./deploy/cpp/opencv-3.4.7/
 pushd ./deploy/cpp/
-wget -nc https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Linux/GPU/x86-64_gcc8.2_avx_mkl_cuda10.1_cudnn7.6.5_trt6.0.1.5/paddle_inference.tgz
-tar xf paddle_inference.tgz
 wget -nc https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/opencv-3.4.7.tar.gz
 tar -xf opencv-3.4.7.tar.gz
 
@@ -85,6 +83,12 @@ if [[ ${MODE} = "cpp_infer" ]]; then
 popd
 echo "################### build opencv finished ###################"
 fi
+if [[ ! -d "./deploy/cpp/paddle_inference/" ]]; then
+    pushd ./deploy/cpp/
+    wget -nc https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Linux/GPU/x86-64_gcc8.2_avx_mkl_cuda10.1_cudnn7.6.5_trt6.0.1.5/paddle_inference.tgz
+    tar xf paddle_inference.tgz
+    popd
+fi
 if [[ $FILENAME == *infer_cpp_linux_gpu_cpu.txt ]]; then
     cpp_type=$(func_parser_value "${lines[2]}")
     cls_inference_model_dir=$(func_parser_value "${lines[3]}")
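
The `func_parser_value` helper used above extracts the value part of a `key:value` config line (e.g. `cpp_infer_type:cls` yields `cls`). Its definition is not part of this diff; a minimal sketch of one possible TIPC-style implementation, offered only as an illustration:

```bash
# Hypothetical sketch: split a "key:value..." line on the first ":" and echo the value.
function func_parser_value() {
    local line="$1"
    echo "${line#*:}"   # strip everything up to and including the first colon
}

# Example: prints ./MobileNetV3_large_x1_0_kl_quant_infer/
func_parser_value "cls_inference_model_dir:./MobileNetV3_large_x1_0_kl_quant_infer/"
```
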