add tipc for cpp infer
parent 18dec7fa78
commit 7399cbac20
@@ -22,15 +22,16 @@ import cv2
 from paddle_serving_app.reader import Sequential, URL2Image, ResizeByFactor
 from paddle_serving_app.reader import Div, Normalize, Transpose
 from ocr_reader import OCRReader
+import codecs

 client = Client()
 # TODO:load_client need to load more than one client model.
 # this need to figure out some details.
 client.load_client_config(sys.argv[1:])
-client.connect(["127.0.0.1:9293"])
+client.connect(["127.0.0.1:8181"]) # 9293

 import paddle
-test_img_dir = "../../doc/imgs/1.jpg"
+test_img_dir = "../../doc/imgs/"

 ocr_reader = OCRReader(char_dict_path="../../ppocr/utils/ppocr_keys_v1.txt")

@@ -62,9 +63,21 @@ for img_file in test_img_list:
     image = cv2_to_base64(image_data)
     res_list = []
     fetch_map = client.predict(feed={"x": image}, fetch=[], batch=True)
-    print(fetch_map)
-    one_batch_res = ocr_reader.postprocess(fetch_map, with_score=True)
-    for res in one_batch_res:
-        res_list.append(res[0])
-    res = {"res": str(res_list)}
-    print(res)
+    if fetch_map is None:
+        print('no results')
+    else:
+        if "text" in fetch_map:
+            for x in fetch_map["text"]:
+                x = codecs.encode(x)
+                words = base64.b64decode(x).decode('utf-8')
+                res_list.append(words)
+        else:
+            try:
+                one_batch_res = ocr_reader.postprocess(
+                    fetch_map, with_score=True)
+                for res in one_batch_res:
+                    res_list.append(res[0])
+            except:
+                print('no results')
+        res = {"res": str(res_list)}
+        print(res)

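A note on the client changes above: the script now connects to 127.0.0.1:8181 (the port that also appears in the C++ serving configs below) and, when the server response carries a "text" field, base64-decodes it directly, falling back to ocr_reader.postprocess otherwise. Assuming a C++ serving server is already listening on 8181, the client is launched with the client-config directories as positional arguments, which client.load_client_config(sys.argv[1:]) loads in order; the directory names below are placeholders produced earlier by paddle_serving_client.convert, not paths recorded in this commit:

    # hypothetical client-config directories for the detection and recognition models
    python3 ocr_cpp_client.py ppocr_det_client/ ppocr_rec_client/
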
@@ -13,7 +13,7 @@ trans_model:-m paddle_serving_client.convert
serving_dir:./deploy/pdserving
web_service:-m paddle_serving_server.serve
--op:GeneralDetectionOp GeneralInferOp
--port:9293
device:gpu
--port:8181
--gpu_id:"0"|null
cpp_client:ocr_cpp_client.py
--image_dir:../../doc/imgs/1.jpg

@@ -13,7 +13,7 @@ trans_model:-m paddle_serving_client.convert
serving_dir:./deploy/pdserving
web_service:-m paddle_serving_server.serve
--op:GeneralDetectionOp GeneralInferOp
--port:9293
device:gpu
--port:8181
--gpu_id:"0"|null
cpp_client:ocr_cpp_client.py
--image_dir:../../doc/imgs/1.jpg

@@ -13,7 +13,7 @@ trans_model:-m paddle_serving_client.convert
serving_dir:./deploy/pdserving
web_service:-m paddle_serving_server.serve
--op:GeneralDetectionOp GeneralInferOp
--port:9293
device:gpu
--port:8181
--gpu_id:"0"|null
cpp_client:ocr_cpp_client.py
--image_dir:../../doc/imgs/1.jpg

@@ -13,7 +13,7 @@ trans_model:-m paddle_serving_client.convert
serving_dir:./deploy/pdserving
web_service:-m paddle_serving_server.serve
--op:GeneralDetectionOp GeneralInferOp
--port:9293
device:gpu
--port:8181
--gpu_id:"0"|null
cpp_client:ocr_cpp_client.py
--image_dir:../../doc/imgs/1.jpg

@@ -47,7 +47,8 @@ op_key=$(func_parser_key "${lines[14]}")
op_value=$(func_parser_value "${lines[14]}")
port_key=$(func_parser_key "${lines[15]}")
port_value=$(func_parser_value "${lines[15]}")
device_value=$(func_parser_value "${lines[16]}")
gpu_key=$(func_parser_key "${lines[16]}")
gpu_value=$(func_parser_value "${lines[16]}")
cpp_client_py=$(func_parser_value "${lines[17]}")
image_dir_key=$(func_parser_key "${lines[18]}")
image_dir_value=$(func_parser_value "${lines[18]}")

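The ${lines[...]} indices above are 0-based offsets into the key:value config files shown earlier, so op_key/op_value, port_key/port_value, gpu_key/gpu_value and image_dir_key/image_dir_value come from the --op, --port, --gpu_id and --image_dir entries. func_parser_key and func_parser_value are assumed to come from test_tipc/common_func.sh and to split a config line on ':'; a minimal sketch of that behaviour, not the exact upstream implementation:

    # split a "key:value" config line, e.g. "--port:8181" -> key "--port", value "8181"
    function func_parser_key(){
        strs=$1
        IFS=":"
        array=(${strs})
        echo ${array[0]}
    }
    function func_parser_value(){
        strs=$1
        IFS=":"
        array=(${strs})
        echo ${array[1]}
    }
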
@@ -108,8 +109,8 @@ function func_serving(){
# cpp serving
unset https_proxy
unset http_proxy
for device in ${device_value[*]}; do
if [ ${device} = "cpu" ]; then
for gpu_id in ${gpu_value[*]}; do
if [ ${gpu_id} = "null" ]; then
if [ ${model_name} = "ch_PP-OCRv2" ] || [ ${model_name} = "ch_PP-OCRv3" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0" ] || [ ${model_name} = "ch_ppocr_server_v2.0" ]; then
web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${det_server_value} ${rec_server_value} ${op_key} ${op_value} ${port_key} ${port_value} > serving_log_cpu.log &"
elif [ ${model_name} = "ch_PP-OCRv2_det" ] || [ ${model_name} = "ch_PP-OCRv3_det" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0_det" ] || [ ${model_name} = "ch_ppocr_server_v2.0_det" ]; then

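The loops introduced here iterate over the device and gpu_id settings pulled from the config. A value such as --gpu_id:"0"|null is assumed to be split on '|' (TIPC configs use '|' to list alternative settings), so the branch above runs once with a real GPU id and once with null, which maps to the CPU command. A small self-contained sketch of that enumeration, under that IFS assumption:

    # sketch: iterating a config value such as  "0"|null  the way the loop above does,
    # assuming the test script sets IFS='|' for multi-option values
    IFS='|'
    gpu_value='"0"|null'
    for gpu_id in ${gpu_value[*]}; do
        if [ ${gpu_id} = "null" ]; then
            echo "start CPU serving (no gpu flag)"
        else
            echo "start GPU serving with --gpu_id ${gpu_id}"
        fi
    done
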
@@ -132,16 +133,16 @@ function func_serving(){
eval $cpp_client_cmd
last_status=${PIPESTATUS[0]}
status_check $last_status "${cpp_client_cmd}" "${status_log}" "${model_name}"
sleep 5s
# sleep 5s
ps ux | grep -i ${port_value} | awk '{print $2}' | xargs kill -s 9
ps ux | grep -i ${web_service_py} | awk '{print $2}' | xargs kill -s 9
elif [ ${device} = "gpu" ]; then
# ps ux | grep -i ${web_service_py} | awk '{print $2}' | xargs kill -s 9
else
if [ ${model_name} = "ch_PP-OCRv2" ] || [ ${model_name} = "ch_PP-OCRv3" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0" ] || [ ${model_name} = "ch_ppocr_server_v2.0" ]; then
web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${det_server_value} ${rec_server_value} ${op_key} ${op_value} ${port_key} ${port_value} --gpu_id=0 > serving_log_gpu.log &"
web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${det_server_value} ${rec_server_value} ${op_key} ${op_value} ${port_key} ${port_value} ${gpu_key} ${gpu_id} > serving_log_gpu.log &"
elif [ ${model_name} = "ch_PP-OCRv2_det" ] || [ ${model_name} = "ch_PP-OCRv3_det" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0_det" ] || [ ${model_name} = "ch_ppocr_server_v2.0_det" ]; then
web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${det_server_value} ${op_key} ${op_value} ${port_key} ${port_value} --gpu_id=0 > serving_log_gpu.log &"
web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${det_server_value} ${op_key} ${op_value} ${port_key} ${port_value} ${gpu_key} ${gpu_id} > serving_log_gpu.log &"
elif [ ${model_name} = "ch_PP-OCRv2_rec" ] || [ ${model_name} = "ch_PP-OCRv3_rec" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0_rec" ] || [ ${model_name} = "ch_ppocr_server_v2.0_rec" ]; then
web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${rec_server_value} ${op_key} ${op_value} ${port_key} ${port_value} --gpu_id=0 > serving_log_gpu.log &"
web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${rec_server_value} ${op_key} ${op_value} ${port_key} ${port_value} ${gpu_key} ${gpu_id} > serving_log_gpu.log &"
fi
eval $web_service_cpp_cmd
sleep 5s

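The substance of this hunk is replacing the hard-coded --gpu_id=0 with ${gpu_key} ${gpu_id}, so the GPU id now comes from the config. For the combined det+rec case the eval'd command expands roughly as below; the serving model directories are placeholders standing in for ${det_server_value} and ${rec_server_value}, not paths recorded in this commit:

    # illustrative expansion of web_service_cpp_cmd in the GPU branch (gpu_id "0")
    python3 -m paddle_serving_server.serve \
        --model ppocr_det_serving/ ppocr_rec_serving/ \
        --op GeneralDetectionOp GeneralInferOp \
        --port 8181 --gpu_id "0" > serving_log_gpu.log &
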
@@ -157,11 +158,9 @@ function func_serving(){
last_status=${PIPESTATUS[0]}
eval "cat ${_save_log_path}"
status_check $last_status "${cpp_client_cmd}" "${status_log}" "${model_name}"
sleep 5s
# sleep 5s
ps ux | grep -i ${port_value} | awk '{print $2}' | xargs kill -s 9
ps ux | grep -i ${web_service_py} | awk '{print $2}' | xargs kill -s 9
else
echo "Does not support hardware other than CPU and GPU Currently!"
# ps ux | grep -i ${web_service_py} | awk '{print $2}' | xargs kill -s 9
fi
done
}

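Both the CPU and GPU paths tear the server down by pattern-matching running processes; a commented version of that idiom, with the port from the configs above substituted for ${port_value}:

    # ps ux            - list the current user's processes
    # grep -i 8181     - keep lines mentioning the serving port
    # awk '{print $2}' - pick out the PID column
    # xargs kill -s 9  - send SIGKILL to each matching PID
    ps ux | grep -i 8181 | awk '{print $2}' | xargs kill -s 9
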