infer_python
parent 234fb5abd8
commit bd097dbe76
@@ -6,10 +6,10 @@ infer_export:null
 infer_quant:False
 inference:tools/infer/predict_system.py
 --use_gpu:False|True
---enable_mkldnn:False|True
---cpu_threads:1|6
+--enable_mkldnn:False
+--cpu_threads:6
 --rec_batch_num:1
---use_tensorrt:False|True
+--use_tensorrt:False
 --precision:fp32
 --det_model_dir:
 --image_dir:./inference/ch_det_data_50/all-sum-510/
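This and the two similar hunks that follow edit TIPC inference configs, where each line is a key:value pair and a "|" inside the value lists alternative settings for the test harness to sweep; the change pins the MKL-DNN, CPU-thread, and TensorRT sweeps to a single value each. As a rough illustration only (not the actual test_tipc parser), a "key:v1|v2" line can be expanded into one flag per alternative like this:

    # Hypothetical sketch: expand "key:v1|v2" into one --flag=value per alternative value.
    line='--enable_mkldnn:False|True'
    key=${line%%:*}                            # "--enable_mkldnn"
    IFS='|' read -ra alts <<< "${line#*:}"     # ("False" "True")
    for v in "${alts[@]}"; do
        echo "python3 tools/infer/predict_system.py ${key}=${v} ..."
    done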
@@ -6,10 +6,10 @@ infer_export:null
 infer_quant:False
 inference:tools/infer/predict_system.py --rec_image_shape="3,48,320"
 --use_gpu:False|True
---enable_mkldnn:False|True
---cpu_threads:1|6
+--enable_mkldnn:False
+--cpu_threads:6
 --rec_batch_num:1
---use_tensorrt:False|True
+--use_tensorrt:False
 --precision:fp32
 --det_model_dir:
 --image_dir:./inference/ch_det_data_50/all-sum-510/
@@ -6,10 +6,10 @@ infer_export:null
 infer_quant:False
 inference:tools/infer/predict_system.py
 --use_gpu:False|True
---enable_mkldnn:False|True
---cpu_threads:1|6
+--enable_mkldnn:False
+--cpu_threads:6
 --rec_batch_num:1
---use_tensorrt:False|True
+--use_tensorrt:False
 --precision:fp32
 --det_model_dir:
 --image_dir:./inference/ch_det_data_50/all-sum-510/
@@ -1,5 +1,5 @@
 ===========================infer_params===========================
-model_name:ocr_det
+model_name:ch_ppocr_mobile_v2.0_det
 python:python
 infer_model:./inference/ch_ppocr_mobile_v2.0_det_infer
 infer_export:null
@@ -7,10 +7,10 @@ infer_quant:False
 inference:tools/infer/predict_det.py
 --use_gpu:True|False
 --enable_mkldnn:False
---cpu_threads:1|6
+--cpu_threads:6
 --rec_batch_num:1
---use_tensorrt:False|True
---precision:fp16|fp32
+--use_tensorrt:False
+--precision:fp32
 --det_model_dir:
 --image_dir:./inference/ch_det_data_50/all-sum-510/
 null:null
@@ -4,7 +4,7 @@ python:python
 gpu_list:-1
 Global.use_gpu:False
 Global.auto_cast:null
-Global.epoch_num:lite_train_lite_infer=5|whole_train_whole_infer=300
+Global.epoch_num:lite_train_lite_infer=5|whole_train_whole_infer=50
 Global.save_model_dir:./output/
 Train.loader.batch_size_per_card:lite_train_lite_infer=2|whole_train_whole_infer=4
 Global.pretrained_model:null
@@ -4,7 +4,7 @@ python:python
 gpu_list:0
 Global.use_gpu:True
 Global.auto_cast:fp32|amp
-Global.epoch_num:lite_train_lite_infer=5|whole_train_whole_infer=300
+Global.epoch_num:lite_train_lite_infer=5|whole_train_whole_infer=50
 Global.save_model_dir:./output/
 Train.loader.batch_size_per_card:lite_train_lite_infer=2|whole_train_whole_infer=4
 Global.pretrained_model:null
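In the two training-config hunks above, Global.epoch_num carries per-mode overrides of the form mode=value joined by "|"; the change lowers the whole_train_whole_infer epoch budget from 300 to 50. A minimal sketch of how such an entry could be resolved for the active mode (illustrative only, not the real harness code):

    # Hypothetical: pick the override whose mode matches $MODE.
    MODE="whole_train_whole_infer"
    entry="lite_train_lite_infer=5|whole_train_whole_infer=50"
    IFS='|' read -ra pairs <<< "$entry"
    for p in "${pairs[@]}"; do
        [ "${p%%=*}" = "$MODE" ] && echo "Global.epoch_num=${p#*=}"
    done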
@@ -39,10 +39,10 @@ infer_export:tools/export_model.py -c configs/det/ch_ppocr_v2.0/ch_det_mv3_db_v2
 infer_quant:False
 inference:tools/infer/predict_det.py
 --use_gpu:True|False
---enable_mkldnn:True|False
+--enable_mkldnn:False
 --cpu_threads:1|6
 --rec_batch_num:1
---use_tensorrt:False|True
+--use_tensorrt:False
 --precision:fp32|fp16|int8
 --det_model_dir:
 --image_dir:./inference/ch_det_data_50/all-sum-510/
@@ -6,8 +6,8 @@ infer_export:null
 infer_quant:True
 inference:tools/infer/predict_system.py
 --use_gpu:False|True
---enable_mkldnn:False|True
---cpu_threads:1|6
+--enable_mkldnn:False
+--cpu_threads:6
 --rec_batch_num:1
 --use_tensorrt:False
 --precision:fp32
@@ -88,7 +88,7 @@ function func_inference(){
 eval $command
 last_status=${PIPESTATUS[0]}
 eval "cat ${_save_log_path}"
-status_check $last_status "${command}" "${status_log}"
+status_check $last_status "${command}" "${status_log}" "${model_name}"
 done
 done
 done
@@ -119,7 +119,7 @@ function func_inference(){
 eval $command
 last_status=${PIPESTATUS[0]}
 eval "cat ${_save_log_path}"
-status_check $last_status "${command}" "${status_log}"
+status_check $last_status "${command}" "${status_log}" "${model_name}"

 done
 done
@@ -153,7 +153,7 @@ if [ ${MODE} = "whole_infer" ]; then
 echo ${infer_run_exports[Count]}
 eval $export_cmd
 status_export=$?
-status_check $status_export "${export_cmd}" "${status_log}"
+status_check $status_export "${export_cmd}" "${status_log}" "${model_name}"
 else
 save_infer_dir=${infer_model}
 fi
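The three script hunks above append "${model_name}" as a fourth argument to status_check, so each pass/fail line written to the status log records which model produced it. One plausible shape for the updated helper, sketched under that assumption (not the exact implementation shipped with the test scripts):

    # Hypothetical status_check accepting the model name as a fourth argument.
    function status_check(){
        local last_status=$1 run_command=$2 run_log=$3 model_name=$4
        if [ "$last_status" -eq 0 ]; then
            echo "Run successfully with command - ${model_name} - ${run_command}!" | tee -a "${run_log}"
        else
            echo "Run failed with command - ${model_name} - ${run_command}!" | tee -a "${run_log}"
        fi
    }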