move model url to config
parent f2b20cff3f, commit 1740116183
@@ -8,6 +8,7 @@ python:python3.7
 --save_file:./deploy/models/MobileNetV3_large_x1_0_infer/inference.onnx
 --opset_version:10
 --enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/MobileNetV3_large_x1_0_infer.tar
 inference:./python/predict_cls.py
 Global.use_onnx:True
 Global.inference_model_dir:./models/MobileNetV3_large_x1_0_infer

@@ -8,6 +8,7 @@ python:python3.7
 --save_file:./deploy/models/general_PPLCNet_x2_5_lite_v1.0_infer/inference.onnx
 --opset_version:10
 --enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/inference/general_PPLCNet_x2_5_lite_v1.0_infer.tar
 inference:./python/predict_cls.py
 Global.use_onnx:True
 Global.inference_model_dir:./models/general_PPLCNet_x2_5_lite_v1.0_infer

@@ -8,6 +8,7 @@ python:python3.7
 --save_file:./deploy/models/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer/inference.onnx
 --opset_version:10
 --enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/inference/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer.tar
 inference:./python/predict_cls.py
 Global.use_onnx:True
 Global.inference_model_dir:./models/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer

@@ -0,0 +1,16 @@
+===========================paddle2onnx_params===========================
+model_name:PPLCNet_x0_25
+python:python3.7
+2onnx: paddle2onnx
+--model_dir:./deploy/models/PPLCNet_x0_25_infer/
+--model_filename:inference.pdmodel
+--params_filename:inference.pdiparams
+--save_file:./deploy/models/PPLCNet_x0_25_infer/inference.onnx
+--opset_version:10
+--enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_25_infer.tar
+inference:./python/predict_cls.py
+Global.use_onnx:True
+Global.inference_model_dir:./models/PPLCNet_x0_25_infer
+Global.use_gpu:False
+-c:configs/inference_cls.yaml

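The new 16-line config above is consumed line-by-line by the TIPC shell scripts. A minimal, self-contained sketch of that consumption (this is an illustration, not the repo's `func_parser_value` helper; the config path is the ResNet50 one referenced in the docs later in this diff):

```shell
#!/bin/bash
# Illustration only: pull inference_model_url out of a key:value config line
# and fetch the model archive, mirroring what prepare.sh now does generically.
config=./test_tipc/config/ResNet/ResNet50_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
url=$(grep '^inference_model_url:' "$config" | head -n1)
url=${url#*:}              # keep everything after the first ':' so the URL's own colons survive
tar_name=${url##*/}        # e.g. ResNet50_infer.tar
mkdir -p deploy/models && cd deploy/models
wget -nc "$url"
tar xf "$tar_name"
cd ../../
```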
@@ -1,15 +0,0 @@
-===========================paddle2onnx_params===========================
-model_name:PPLCNet_x0_35
-python:python3.7
-2onnx: paddle2onnx
---model_dir:./deploy/models/PPLCNet_x0_35_infer/
---model_filename:inference.pdmodel
---params_filename:inference.pdiparams
---save_file:./deploy/models/PPLCNet_x0_35_infer/inference.onnx
---opset_version:10
---enable_onnx_checker:True
-inference:./python/predict_cls.py
-Global.use_onnx:True
-Global.inference_model_dir:./models/PPLCNet_x0_35_infer
-Global.use_gpu:False
--c:configs/inference_cls.yaml

@@ -0,0 +1,16 @@
+===========================paddle2onnx_params===========================
+model_name:PP-ShiTu_mainbody_det
+python:python3.7
+2onnx: paddle2onnx
+--model_dir:./deploy/models/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer/
+--model_filename:inference.pdmodel
+--params_filename:inference.pdiparams
+--save_file:./deploy/models/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer/inference.onnx
+--opset_version:10
+--enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/inference/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer.tar
+inference:./python/predict_cls.py
+Global.use_onnx:True
+Global.inference_model_dir:./models/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer
+Global.use_gpu:False
+-c:configs/inference_cls.yaml

@@ -1,15 +0,0 @@
-===========================paddle2onnx_params===========================
-model_name:PPLCNet_x0_5
-python:python3.7
-2onnx: paddle2onnx
---model_dir:./deploy/models/PPLCNet_x0_5_infer/
---model_filename:inference.pdmodel
---params_filename:inference.pdiparams
---save_file:./deploy/models/PPLCNet_x0_5_infer/inference.onnx
---opset_version:10
---enable_onnx_checker:True
-inference:./python/predict_cls.py
-Global.use_onnx:True
-Global.inference_model_dir:./models/PPLCNet_x0_5_infer
-Global.use_gpu:False
--c:configs/inference_cls.yaml

@@ -8,6 +8,7 @@ python:python3.7
 --save_file:./deploy/models/PPLCNet_x0_75_infer/inference.onnx
 --opset_version:10
 --enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_75_infer.tar
 inference:./python/predict_cls.py
 Global.use_onnx:True
 Global.inference_model_dir:./models/PPLCNet_x0_75_infer

@@ -8,6 +8,7 @@ python:python3.7
 --save_file:./deploy/models/PPLCNet_x1_0_infer/inference.onnx
 --opset_version:10
 --enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x1_0_infer.tar
 inference:./python/predict_cls.py
 Global.use_onnx:True
 Global.inference_model_dir:./models/PPLCNet_x1_0_infer

@@ -8,6 +8,7 @@ python:python3.7
 --save_file:./deploy/models/PPLCNet_x1_5_infer/inference.onnx
 --opset_version:10
 --enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x1_5_infer.tar
 inference:./python/predict_cls.py
 Global.use_onnx:True
 Global.inference_model_dir:./models/PPLCNet_x1_5_infer

@@ -0,0 +1,16 @@
+===========================paddle2onnx_params===========================
+model_name:PPLCNet_x2_0
+python:python3.7
+2onnx: paddle2onnx
+--model_dir:./deploy/models/PPLCNet_x2_0_infer/
+--model_filename:inference.pdmodel
+--params_filename:inference.pdiparams
+--save_file:./deploy/models/PPLCNet_x2_0_infer/inference.onnx
+--opset_version:10
+--enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x2_0_infer.tar
+inference:./python/predict_cls.py
+Global.use_onnx:True
+Global.inference_model_dir:./models/PPLCNet_x2_0_infer
+Global.use_gpu:False
+-c:configs/inference_cls.yaml

@@ -8,6 +8,7 @@ python:python3.7
 --save_file:./deploy/models/PPLCNet_x2_5_infer/inference.onnx
 --opset_version:10
 --enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x2_5_infer.tar
 inference:./python/predict_cls.py
 Global.use_onnx:True
 Global.inference_model_dir:./models/PPLCNet_x2_5_infer

@@ -0,0 +1,16 @@
+===========================paddle2onnx_params===========================
+model_name:PPLCNetV2_base
+python:python3.7
+2onnx: paddle2onnx
+--model_dir:./deploy/models/PPLCNetV2_base_infer/
+--model_filename:inference.pdmodel
+--params_filename:inference.pdiparams
+--save_file:./deploy/models/PPLCNetV2_base_infer/inference.onnx
+--opset_version:10
+--enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNetV2_base_infer.tar
+inference:./python/predict_cls.py
+Global.use_onnx:True
+Global.inference_model_dir:./models/PPLCNetV2_base_infer
+Global.use_gpu:False
+-c:configs/inference_cls.yaml

@@ -8,6 +8,7 @@ python:python3.7
 --save_file:./deploy/models/ResNet50_infer/inference.onnx
 --opset_version:10
 --enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_infer.tar
 inference:./python/predict_cls.py
 Global.use_onnx:True
 Global.inference_model_dir:./models/ResNet50_infer

@@ -8,7 +8,9 @@ python:python3.7
 --save_file:./deploy/models/ResNet50_vd_infer/inference.onnx
 --opset_version:10
 --enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_vd_infer.tar
 inference: python/predict_cls.py -c configs/inference_cls.yaml
 Global.use_onnx:True
 Global.inference_model_dir:models/ResNet50_vd_infer/
 Global.use_gpu:False
 -c:configs/inference_cls.yaml

@@ -1,15 +0,0 @@
-===========================paddle2onnx_params===========================
-model_name:ResNet50_vd
-python:python3.7
-2onnx: paddle2onnx
---model_dir:./deploy/models/ResNet50_vd_infer/
---model_filename:inference.pdmodel
---params_filename:inference.pdiparams
---save_file:./deploy/models/ResNet50_vd_infer/inference.onnx
---opset_version:10
---enable_onnx_checker:True
-inference:./python/predict_cls.py
-Global.use_onnx:True
-Global.inference_model_dir:./models/ResNet50_vd_infer
-Global.use_gpu:False
--c:configs/inference_cls.yaml

@@ -8,6 +8,7 @@ python:python3.7
 --save_file:./deploy/models/SwinTransformer_tiny_patch4_window7_224_infer/inference.onnx
 --opset_version:10
 --enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/SwinTransformer_tiny_patch4_window7_224_infer.tar
 inference:./python/predict_cls.py
 Global.use_onnx:True
 Global.inference_model_dir:./models/SwinTransformer_tiny_patch4_window7_224_infer

@@ -1,15 +0,0 @@
-===========================paddle2onnx_params===========================
-model_name:SwinTransformer_tiny_patch4_window7_224
-python:python3.7
-2onnx: paddle2onnx
---model_dir:./deploy/models/SwinTransformer_tiny_patch4_window7_224_infer/
---model_filename:inference.pdmodel
---params_filename:inference.pdiparams
---save_file:./deploy/models/SwinTransformer_tiny_patch4_window7_224_infer/inference.onnx
---opset_version:10
---enable_onnx_checker:True
-inference:./python/predict_cls.py
-Global.use_onnx:True
-Global.inference_model_dir:./models/SwinTransformer_tiny_patch4_window7_224_infer
-Global.use_gpu:False
--c:configs/inference_cls.yaml

@@ -10,36 +10,39 @@ The main program of the PaddleServing prediction functional test is `test_paddle2onnx.sh`, which can test
 | ---- | ---- |
 | normal model | GPU |
 | normal model | CPU |
 | quantized model | GPU |
 | quantized model | CPU |


 ## 2. Test Procedure

 The following uses the paddle2onnx test of the `ResNet50` model as an example.

 ### 2.1 Functional Test
-First run `prepare.sh` to prepare the data and model, then run `test_paddle2onnx.sh` for the test; log files with the `paddle2onnx_infer_*.log` suffix are generated under the `test_tipc/output` directory.
+First run `prepare.sh` to prepare the data and model, then run `test_paddle2onnx.sh` for the test; log files with the `paddle2onnx_infer_*.log` suffix are generated under the `test_tipc/output/ResNet50` directory.
 The test commands and results below use PPHGNet_small as an example.

 ```shell
-bash test_tipc/prepare.sh ./test_tipc/config/PPHGNet/PPHGNet_small_paddle2onnx_infer_python.txt paddle2onnx_infer
+bash test_tipc/prepare.sh ./test_tipc/config/ResNet/ResNet50_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt paddle2onnx_infer

 # Usage:
-bash test_tipc/test_paddle2onnx.sh ./test_tipc/config/PPHGNet/PPHGNet_small_paddle2onnx_infer_python.txt
+bash test_tipc/test_paddle2onnx.sh ./test_tipc/config/ResNet/ResNet50_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
 ```

 #### Run Results

-The status of each test run is printed in `test_tipc/output/results_paddle2onnx.log`:
+The status of each test run is printed in `./test_tipc/output/ResNet50/results_paddle2onnx.log`:
 On success, it outputs:

 ```
-Run successfully with command - paddle2onnx --model_dir=./deploy/models/PPHGNet_tiny_infer/ --model_filename=inference.pdmodel --params_filename=inference.pdiparams --save_file=./deploy/models/PPHGNet_tiny_infer/inference.onnx --opset_version=10 --enable_onnx_checker=True!
-Run successfully with command - cd deploy && python3.7 ./python/predict_cls.py -o Global.inference_model_dir=./models/PPHGNet_tiny_infer -o Global.use_onnx=True -o Global.use_gpu=False -c=configs/inference_cls.yaml > ../test_tipc/output/paddle2onnx_infer_cpu.log 2>&1 && cd ../!
+Run successfully with command - paddle2onnx --model_dir=./deploy/models/ResNet50_infer/ --model_filename=inference.pdmodel --params_filename=inference.pdiparams --save_file=./deploy/models/ResNet50_infer/inference.onnx --opset_version=10 --enable_onnx_checker=True!
+Run successfully with command - cd deploy && python3.7 ./python/predict_cls.py -o Global.inference_model_dir=./models/ResNet50_infer -o Global.use_onnx=True -o Global.use_gpu=False -c=configs/inference_cls.yaml > ../test_tipc/output/ResNet50/paddle2onnx_infer_cpu.log 2>&1 && cd ../!

 ```

 On failure, it outputs:

 ```
-Run failed with command - paddle2onnx --model_dir=./deploy/models/PPHGNet_tiny_infer/ --model_filename=inference.pdmodel --params_filename=inference.pdiparams --save_file=./deploy/models/PPHGNet_tiny_infer/inference.onnx --opset_version=10 --enable_onnx_checker=True!
+Run failed with command - paddle2onnx --model_dir=./deploy/models/ResNet50_infer/ --model_filename=inference.pdmodel --params_filename=inference.pdiparams --save_file=./deploy/models/ResNet50_infer/inference.onnx --opset_version=10 --enable_onnx_checker=True!
+Run failed with command - cd deploy && python3.7 ./python/predict_cls.py -o Global.inference_model_dir=./models/ResNet50_infer -o Global.use_onnx=True -o Global.use_gpu=False -c=configs/inference_cls.yaml > ../test_tipc/output/ResNet50/paddle2onnx_infer_cpu.log 2>&1 && cd ../!
 ...
 ```

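As a quick follow-up to the two commands above, the results log under the new per-model output directory can be scanned directly; this check is a convenience only and is not part of the TIPC scripts:

```shell
# Count passing steps and surface any failures for the ResNet50 run
grep -c "Run successfully" ./test_tipc/output/ResNet50/results_paddle2onnx.log
grep "Run failed" ./test_tipc/output/ResNet50/results_paddle2onnx.log || echo "no failures logged"
```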
@@ -174,161 +174,18 @@ fi
 if [ ${MODE} = "paddle2onnx_infer" ];then
     # prepare paddle2onnx env
     python_name=$(func_parser_value "${lines[2]}")
+    inference_model_url=$(func_parser_value "${lines[10]}")
+    tar_name=${inference_model_url##*/}

     ${python_name} -m pip install install paddle2onnx
     ${python_name} -m pip install onnxruntime
-    if [ ${model_name} == "ResNet50" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_infer.tar
-        tar xf ResNet50_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "ResNet50_vd" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_vd_infer.tar
-        tar xf ResNet50_vd_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "MobileNetV3_large_x1_0" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/MobileNetV3_large_x1_0_infer.tar
-        tar xf MobileNetV3_large_x1_0_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "SwinTransformer_tiny_patch4_window7_224" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/SwinTransformer_tiny_patch4_window7_224_infer.tar
-        tar xf SwinTransformer_tiny_patch4_window7_224_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "PPLCNet_x0_25" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_25_infer.tar
-        tar xf PPLCNet_x0_25_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "PPLCNet_x0_35" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_35_infer.tar
-        tar xf PPLCNet_x0_35_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "PPLCNet_x0_5" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_5_infer.tar
-        tar xf PPLCNet_x0_5_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "PPLCNet_x0_75" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_75_infer.tar
-        tar xf PPLCNet_x0_75_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "PPLCNet_x1_0" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x1_0_infer.tar
-        tar xf PPLCNet_x1_0_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "PPLCNet_x1_5" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x1_5_infer.tar
-        tar xf PPLCNet_x1_5_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "PPLCNet_x2_0" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x2_0_infer.tar
-        tar xf PPLCNet_x2_0_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "PPLCNet_x2_5" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x2_5_infer.tar
-        tar xf PPLCNet_x2_5_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "PP-ShiTu_general_rec" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/inference/general_PPLCNet_x2_5_lite_v1.0_infer.tar
-        tar xf general_PPLCNet_x2_5_lite_v1.0_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "PP-ShiTu_mainbody_det" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/inference/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer.tar
-        tar xf picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "PPLCNetV2_base" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNetV2_base_infer.tar
-        tar xf PPLCNetV2_base_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "PPHGNet_tiny" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPHGNet_tiny_infer.tar
-        tar xf PPHGNet_tiny_infer.tar
-        cd ../../
-    fi
-    if [ ${model_name} == "PPHGNet_small" ]; then
-        # wget model
-        cd deploy
-        mkdir models
-        cd models
-        wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPHGNet_small_infer.tar
-        tar xf PPHGNet_small_infer.tar
-        cd ../../
-    fi
+    cd deploy
+    mkdir models
+    cd models
+    wget -nc ${inference_model_url}
+    tar xf ${tar_name}
+    cd ../../
 fi

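The hunk above collapses seventeen per-model `if` blocks into one generic download driven by the config's `inference_model_url`. Adding a new model to the paddle2onnx TIPC flow therefore no longer requires touching `prepare.sh`; it only needs a URL line in that model's config file. A hypothetical entry (the model name and URL below are placeholders, not real artifacts):

```
inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/<NewModel>_infer.tar
```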
@@ -11,7 +11,7 @@ python=$(func_parser_value "${lines[2]}")


 # parser params
-dataline=$(awk 'NR==1, NR==15{print}' $FILENAME)
+dataline=$(awk 'NR==1, NR==16{print}' $FILENAME)
 IFS=$'\n'
 lines=(${dataline})

@@ -32,17 +32,17 @@ opset_version_value=$(func_parser_value "${lines[8]}")
 enable_onnx_checker_key=$(func_parser_key "${lines[9]}")
 enable_onnx_checker_value=$(func_parser_value "${lines[9]}")
 # parser onnx inference
-inference_py=$(func_parser_value "${lines[10]}")
-use_onnx_key=$(func_parser_key "${lines[11]}")
-use_onnx_value=$(func_parser_value "${lines[11]}")
-inference_model_dir_key=$(func_parser_key "${lines[12]}")
-inference_model_dir_value=$(func_parser_value "${lines[12]}")
-inference_hardware_key=$(func_parser_key "${lines[13]}")
-inference_hardware_value=$(func_parser_value "${lines[13]}")
-inference_config_key=$(func_parser_key "${lines[14]}")
-inference_config_value=$(func_parser_value "${lines[14]}")
+inference_py=$(func_parser_value "${lines[11]}")
+use_onnx_key=$(func_parser_key "${lines[12]}")
+use_onnx_value=$(func_parser_value "${lines[12]}")
+inference_model_dir_key=$(func_parser_key "${lines[13]}")
+inference_model_dir_value=$(func_parser_value "${lines[13]}")
+inference_hardware_key=$(func_parser_key "${lines[14]}")
+inference_hardware_value=$(func_parser_value "${lines[14]}")
+inference_config_key=$(func_parser_key "${lines[15]}")
+inference_config_value=$(func_parser_value "${lines[15]}")

-LOG_PATH="./test_tipc/output"
+LOG_PATH="./test_tipc/output/${model_name}"
 mkdir -p ./test_tipc/output
 status_log="${LOG_PATH}/results_paddle2onnx.log"
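The index shift above (lines[10] to lines[11] and so on, together with the NR==15 to NR==16 change in the previous hunk) follows directly from the new `inference_model_url` entry occupying the 11th config line, i.e. index 10 of the 0-indexed `lines` array. A small sketch, assuming the ResNet50 config from the docs is present, that makes the mapping visible:

```shell
# Illustration of the config-line -> lines[] mapping after this change
FILENAME=./test_tipc/config/ResNet/ResNet50_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
dataline=$(awk 'NR==1, NR==16{print}' $FILENAME)
IFS=$'\n'
lines=(${dataline})
echo "lines[10] -> ${lines[10]}"   # inference_model_url:...
echo "lines[11] -> ${lines[11]}"   # inference:./python/predict_cls.py
echo "lines[15] -> ${lines[15]}"   # -c:configs/inference_cls.yaml
```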