update status_check function

parent 1dee07fa76
commit e0a4307a44
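This commit appends a fourth argument, "${model_name}", to the status_check calls in the TIPC test scripts, so each pass/fail line written to the status log records which model produced it. For orientation, below is a minimal sketch of what the updated helper could look like; it assumes status_check lives in a shared script such as test_tipc/common_func.sh, and the exact definition and log format in the repository may differ.

# Hypothetical sketch only -- the real helper in the repository may differ.
function status_check(){
    local last_status=$1    # exit code of the command that was run
    local run_command=$2    # command string, echoed into the log for traceability
    local run_log=$3        # status log file the result line is appended to
    local model_name=$4     # new fourth argument: model under test
    if [ ${last_status} -eq 0 ]; then
        echo -e "\033[33m Run successfully with command - ${model_name} - ${run_command}! \033[0m" | tee -a ${run_log}
    else
        echo -e "\033[33m Run failed with command - ${model_name} - ${run_command}! \033[0m" | tee -a ${run_log}
    fi
}

With a positional signature like this, the existing three-argument calls keep working (model_name is simply empty), which is why the scripts can be migrated one call site at a time, as the hunks below do.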
@@ -225,7 +225,7 @@ for batch_size in ${batch_size_list[*]}; do
 echo $cmd
 eval $cmd
 last_status=${PIPESTATUS[0]}
-status_check $last_status "${cmd}" "${status_log}"
+status_check $last_status "${cmd}" "${status_log}" "${model_name}"
 else
 IFS=";"
 unset_env=`unset CUDA_VISIBLE_DEVICES`
@@ -261,7 +261,7 @@ for batch_size in ${batch_size_list[*]}; do
 echo $cmd
 eval $cmd
 last_status=${PIPESTATUS[0]}
-status_check $last_status "${cmd}" "${status_log}"
+status_check $last_status "${cmd}" "${status_log}" "${model_name}"
 fi
 done
 done

@@ -63,7 +63,7 @@ function func_shitu_cpp_inference(){
 command="${_script} > ${_save_log_path} 2>&1"
 eval $command
 last_status=${PIPESTATUS[0]}
-status_check $last_status "${command}" "${status_log}"
+status_check $last_status "${command}" "${status_log}" "${model_name}"
 done
 done
 done
@@ -87,7 +87,7 @@ function func_shitu_cpp_inference(){
 command="${_script} > ${_save_log_path} 2>&1"
 eval $command
 last_status=${PIPESTATUS[0]}
-status_check $last_status "${command}" "${status_log}"
+status_check $last_status "${command}" "${status_log}" "${model_name}"
 done
 done
 done
@@ -125,7 +125,7 @@ function func_cls_cpp_inference(){
 command1="${_script} > ${_save_log_path} 2>&1"
 eval ${command1}
 last_status=${PIPESTATUS[0]}
-status_check $last_status "${command1}" "${status_log}"
+status_check $last_status "${command1}" "${status_log}" "${model_name}"
 done
 done
 done
@@ -148,7 +148,7 @@ function func_cls_cpp_inference(){
 command="${_script} > ${_save_log_path} 2>&1"
 eval $command
 last_status=${PIPESTATUS[0]}
-status_check $last_status "${command}" "${status_log}"
+status_check $last_status "${command}" "${status_log}" "${model_name}"
 done
 done
 done

@@ -71,7 +71,7 @@ if [ ${MODE} = "whole_infer" ]; then
 echo $export_cmd
 eval $export_cmd
 status_export=$?
-status_check $status_export "${export_cmd}" "${status_log}"
+status_check $status_export "${export_cmd}" "${status_log}" "${model_name}"
 else
 save_infer_dir=${infer_model}
 fi

@@ -67,7 +67,7 @@ function func_test_tipc(){
 eval ${command1}
 command2="adb shell 'export LD_LIBRARY_PATH=${lite_arm_work_path}; ${real_inference_cmd}' > ${_save_log_path} 2>&1"
 eval ${command2}
-status_check $? "${command2}" "${status_log}"
+status_check $? "${command2}" "${status_log}" "${model_name}"
 done
 done
 done

@@ -55,7 +55,7 @@ function func_paddle2onnx(){
 trans_model_cmd="${padlle2onnx_cmd} ${set_dirname} ${set_model_filename} ${set_params_filename} ${set_save_model} ${set_opset_version} ${set_enable_onnx_checker}"
 eval $trans_model_cmd
 last_status=${PIPESTATUS[0]}
-status_check $last_status "${trans_model_cmd}" "${status_log}" "${model_name}"
+status_check $last_status "${trans_model_cmd}" "${status_log}" "${model_name}" "${model_name}"
 
 # python inference
 set_model_dir=$(func_set_params "${inference_model_dir_key}" "${inference_model_dir_value}")
@@ -64,7 +64,7 @@ function func_paddle2onnx(){
 set_inference_config=$(func_set_params "${inference_config_key}" "${inference_config_value}")
 infer_model_cmd="cd deploy && ${python} ${inference_py} -o ${set_model_dir} -o ${set_use_onnx} -o ${set_hardware} ${set_inference_config} > ${_save_log_path} 2>&1 && cd ../"
 eval $infer_model_cmd
-status_check $last_status "${infer_model_cmd}" "${status_log}" "${model_name}"
+status_check $last_status "${infer_model_cmd}" "${status_log}" "${model_name}" "${model_name}"
 }
 
 

@@ -88,7 +88,7 @@ function func_serving_cls(){
 _save_log_path="${LOG_PATH}/server_infer_cpp_cpu_pipeline_usemkldnn_False_threads_4_batchsize_1.log"
 pipeline_cmd="${python} ocr_cpp_client.py ppocr_det_mobile_2.0_client/ ppocr_rec_mobile_2.0_client/"
 eval $pipeline_cmd
-status_check $last_status "${pipeline_cmd}" "${status_log}"
+status_check $last_status "${pipeline_cmd}" "${status_log}" "${model_name}"
 sleep 5s
 ps ux | grep -E 'web_service|pipeline' | awk '{print $2}' | xargs kill -s 9
 else
@@ -98,7 +98,7 @@ function func_serving_cls(){
 _save_log_path="${LOG_PATH}/server_infer_cpp_cpu_pipeline_usemkldnn_False_threads_4_batchsize_1.log"
 pipeline_cmd="${python} ocr_cpp_client.py ppocr_det_mobile_2.0_client/ ppocr_rec_mobile_2.0_client/"
 eval $pipeline_cmd
-status_check $last_status "${pipeline_cmd}" "${status_log}"
+status_check $last_status "${pipeline_cmd}" "${status_log}" "${model_name}"
 sleep 5s
 ps ux | grep -E 'web_service|pipeline' | awk '{print $2}' | xargs kill -s 9
 fi
@@ -124,7 +124,7 @@ function func_serving_cls(){
 eval $pipeline_cmd
 last_status=${PIPESTATUS[0]}
 eval "cat ${_save_log_path}"
-status_check $last_status "${pipeline_cmd}" "${status_log}"
+status_check $last_status "${pipeline_cmd}" "${status_log}" "${model_name}"
 sleep 5s
 done
 ps ux | grep -E 'web_service|pipeline' | awk '{print $2}' | xargs kill -s 9
@@ -156,7 +156,7 @@ function func_serving_cls(){
 eval $pipeline_cmd
 last_status=${PIPESTATUS[0]}
 eval "cat ${_save_log_path}"
-status_check $last_status "${pipeline_cmd}" "${status_log}"
+status_check $last_status "${pipeline_cmd}" "${status_log}" "${model_name}"
 sleep 5s
 done
 ps ux | grep -E 'web_service|pipeline' | awk '{print $2}' | xargs kill -s 9
@@ -250,7 +250,7 @@ function func_serving_rec(){
 _save_log_path="${LOG_PATH}/server_infer_cpp_cpu_pipeline_usemkldnn_False_threads_4_batchsize_1.log"
 pipeline_cmd="${python} ocr_cpp_client.py ppocr_det_mobile_2.0_client/ ppocr_rec_mobile_2.0_client/"
 eval $pipeline_cmd
-status_check $last_status "${pipeline_cmd}" "${status_log}"
+status_check $last_status "${pipeline_cmd}" "${status_log}" "${model_name}"
 sleep 5s
 ps ux | grep -E 'web_service|pipeline' | awk '{print $2}' | xargs kill -s 9
 else
@@ -260,7 +260,7 @@ function func_serving_rec(){
 _save_log_path="${LOG_PATH}/server_infer_cpp_cpu_pipeline_usemkldnn_False_threads_4_batchsize_1.log"
 pipeline_cmd="${python} ocr_cpp_client.py ppocr_det_mobile_2.0_client/ ppocr_rec_mobile_2.0_client/"
 eval $pipeline_cmd
-status_check $last_status "${pipeline_cmd}" "${status_log}"
+status_check $last_status "${pipeline_cmd}" "${status_log}" "${model_name}"
 sleep 5s
 ps ux | grep -E 'web_service|pipeline' | awk '{print $2}' | xargs kill -s 9
 fi
@@ -286,7 +286,7 @@ function func_serving_rec(){
 eval $pipeline_cmd
 last_status=${PIPESTATUS[0]}
 eval "cat ${_save_log_path}"
-status_check $last_status "${pipeline_cmd}" "${status_log}"
+status_check $last_status "${pipeline_cmd}" "${status_log}" "${model_name}"
 sleep 5s
 done
 ps ux | grep -E 'web_service|pipeline' | awk '{print $2}' | xargs kill -s 9
@@ -318,7 +318,7 @@ function func_serving_rec(){
 eval $pipeline_cmd
 last_status=${PIPESTATUS[0]}
 eval "cat ${_save_log_path}"
-status_check $last_status "${pipeline_cmd}" "${status_log}"
+status_check $last_status "${pipeline_cmd}" "${status_log}" "${model_name}"
 sleep 10s
 done
 ps ux | grep -E 'web_service|pipeline' | awk '{print $2}' | xargs kill -s 9

@@ -126,7 +126,7 @@ function func_inference(){
 eval $command
 last_status=${PIPESTATUS[0]}
 eval "cat ${_save_log_path}"
-status_check $last_status "${command}" "../${status_log}"
+status_check $last_status "${command}" "../${status_log}" "${model_name}"
 done
 done
 done
@@ -151,7 +151,7 @@ function func_inference(){
 eval $command
 last_status=${PIPESTATUS[0]}
 eval "cat ${_save_log_path}"
-status_check $last_status "${command}" "../${status_log}"
+status_check $last_status "${command}" "../${status_log}" "${model_name}"
 done
 done
 done
@@ -198,7 +198,7 @@ elif [[ ${MODE} = "klquant_whole_infer" ]]; then
 command="${python} ${kl_quant_cmd_value}"
 eval $command
 last_status=${PIPESTATUS[0]}
-status_check $last_status "${command}" "${status_log}"
+status_check $last_status "${command}" "${status_log}" "${model_name}"
 cd inference/quant_post_static_model
 ln -s __model__ inference.pdmodel
 ln -s __params__ inference.pdiparams
@@ -301,7 +301,7 @@ else
 # export FLAGS_cudnn_deterministic=True
 sleep 5
 eval $cmd
-status_check $? "${cmd}" "${status_log}"
+status_check $? "${cmd}" "${status_log}" "${model_name}"
 sleep 5
 
 if [[ $FILENAME == *GeneralRecognition* ]]; then
@@ -318,7 +318,7 @@ else
 set_eval_params1=$(func_set_params "${eval_key1}" "${eval_value1}")
 eval_cmd="${python} ${eval_py} ${set_eval_pretrain} ${set_use_gpu} ${set_eval_params1}"
 eval $eval_cmd
-status_check $? "${eval_cmd}" "${status_log}"
+status_check $? "${eval_cmd}" "${status_log}" "${model_name}"
 sleep 5
 fi
 # run export model
@@ -333,7 +333,7 @@ else
 set_save_infer_key=$(func_set_params "${save_infer_key}" "${save_infer_path}")
 export_cmd="${python} ${run_export} ${set_export_weight} ${set_save_infer_key}"
 eval $export_cmd
-status_check $? "${export_cmd}" "${status_log}"
+status_check $? "${export_cmd}" "${status_log}" "${model_name}"
 
 #run inference
 eval $env