add cpp serving tipc
parent d75b1229b3
commit 88fc3318c5
@@ -30,7 +30,7 @@ client.load_client_config(sys.argv[1:])
 client.connect(["127.0.0.1:9293"])

 import paddle
-test_img_dir = "../../doc/imgs/"
+test_img_dir = "../../doc/imgs/1.jpg"

 ocr_reader = OCRReader(char_dict_path="../../ppocr/utils/ppocr_keys_v1.txt")
@@ -40,12 +40,29 @@ def cv2_to_base64(image):
         'utf8') #data.tostring()).decode('utf8')


-for img_file in os.listdir(test_img_dir):
-    with open(os.path.join(test_img_dir, img_file), 'rb') as file:
+def _check_image_file(path):
+    img_end = {'jpg', 'bmp', 'png', 'jpeg', 'rgb', 'tif', 'tiff', 'gif'}
+    return any([path.lower().endswith(e) for e in img_end])
+
+
+test_img_list = []
+if os.path.isfile(test_img_dir) and _check_image_file(test_img_dir):
+    test_img_list.append(test_img_dir)
+elif os.path.isdir(test_img_dir):
+    for single_file in os.listdir(test_img_dir):
+        file_path = os.path.join(test_img_dir, single_file)
+        if os.path.isfile(file_path) and _check_image_file(file_path):
+            test_img_list.append(file_path)
+if len(test_img_list) == 0:
+    raise Exception("not found any img file in {}".format(test_img_dir))
+
+for img_file in test_img_list:
+    with open(img_file, 'rb') as file:
         image_data = file.read()
     image = cv2_to_base64(image_data)
     res_list = []
     fetch_map = client.predict(feed={"x": image}, fetch=[], batch=True)
+    print(fetch_map)
     one_batch_res = ocr_reader.postprocess(fetch_map, with_score=True)
     for res in one_batch_res:
         res_list.append(res[0])
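For reference, the test harness further down launches this client with the converted client-config directories as positional arguments (the image path itself is hard-coded above). A minimal manual run is sketched below; the directory names are taken from the ch_PP-OCRv3 TIPC config in this commit and are assumptions if the models were converted elsewhere:

# Sketch only: run from deploy/pdserving after model conversion.
python3.7 ocr_cpp_client.py ppocr_det_v3_client ppocr_rec_v3_client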
@@ -0,0 +1,16 @@
feed_var {
  name: "x"
  alias_name: "x"
  is_lod_tensor: false
  feed_type: 20
  shape: 1
}
fetch_var {
  name: "save_infer_model/scale_0.tmp_1"
  alias_name: "save_infer_model/scale_0.tmp_1"
  is_lod_tensor: false
  fetch_type: 1
  shape: 1
  shape: 640
  shape: 640
}
@@ -0,0 +1,69 @@
# Docker image to use:
# registry.baidubce.com/paddlepaddle/paddle:latest-dev-cuda10.1-cudnn7-gcc82

# Build the Serving server:

# The client and app packages can use the release versions directly.

# The server must be rebuilt because a custom OP has been added.

apt-get update
apt install -y libcurl4-openssl-dev libbz2-dev
wget https://paddle-serving.bj.bcebos.com/others/centos_ssl.tar && tar xf centos_ssl.tar && rm -rf centos_ssl.tar && mv libcrypto.so.1.0.2k /usr/lib/libcrypto.so.1.0.2k && mv libssl.so.1.0.2k /usr/lib/libssl.so.1.0.2k && ln -sf /usr/lib/libcrypto.so.1.0.2k /usr/lib/libcrypto.so.10 && ln -sf /usr/lib/libssl.so.1.0.2k /usr/lib/libssl.so.10 && ln -sf /usr/lib/libcrypto.so.10 /usr/lib/libcrypto.so && ln -sf /usr/lib/libssl.so.10 /usr/lib/libssl.so

# Install the Go dependencies
rm -rf /usr/local/go
wget -qO- https://paddle-ci.cdn.bcebos.com/go1.17.2.linux-amd64.tar.gz | tar -xz -C /usr/local
export GOROOT=/usr/local/go
export GOPATH=/root/gopath
export PATH=$PATH:$GOPATH/bin:$GOROOT/bin
go env -w GO111MODULE=on
go env -w GOPROXY=https://goproxy.cn,direct
go install github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway@v1.15.2
go install github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger@v1.15.2
go install github.com/golang/protobuf/protoc-gen-go@v1.4.3
go install google.golang.org/grpc@v1.33.0
go env -w GO111MODULE=auto

# Download the OpenCV library
wget https://paddle-qa.bj.bcebos.com/PaddleServing/opencv3.tar.gz && tar -xvf opencv3.tar.gz && rm -rf opencv3.tar.gz
export OPENCV_DIR=$PWD/opencv3

# Clone Serving
git clone https://github.com/PaddlePaddle/Serving.git -b develop --depth=1
cd Serving
export Serving_repo_path=$PWD
git submodule update --init --recursive
python -m pip install -r python/requirements.txt


export PYTHON_INCLUDE_DIR=$(python -c "from distutils.sysconfig import get_python_inc; print(get_python_inc())")
export PYTHON_LIBRARIES=$(python -c "import distutils.sysconfig as sysconfig; print(sysconfig.get_config_var('LIBDIR'))")
export PYTHON_EXECUTABLE=`which python`

export CUDA_PATH='/usr/local/cuda'
export CUDNN_LIBRARY='/usr/local/cuda/lib64/'
export CUDA_CUDART_LIBRARY='/usr/local/cuda/lib64/'
export TENSORRT_LIBRARY_PATH='/usr/local/TensorRT6-cuda10.1-cudnn7/targets/x86_64-linux-gnu/'

# Copy the custom OP source into Serving
cp -rf ../deploy/pdserving/general_detection_op.cpp ${Serving_repo_path}/core/general-server/op

# Build the server and export SERVING_BIN
mkdir server-build-gpu-opencv && cd server-build-gpu-opencv
cmake -DPYTHON_INCLUDE_DIR=$PYTHON_INCLUDE_DIR \
    -DPYTHON_LIBRARIES=$PYTHON_LIBRARIES \
    -DPYTHON_EXECUTABLE=$PYTHON_EXECUTABLE \
    -DCUDA_TOOLKIT_ROOT_DIR=${CUDA_PATH} \
    -DCUDNN_LIBRARY=${CUDNN_LIBRARY} \
    -DCUDA_CUDART_LIBRARY=${CUDA_CUDART_LIBRARY} \
    -DTENSORRT_ROOT=${TENSORRT_LIBRARY_PATH} \
    -DOPENCV_DIR=${OPENCV_DIR} \
    -DWITH_OPENCV=ON \
    -DSERVER=ON \
    -DWITH_GPU=ON ..
make -j32

python -m pip install python/dist/paddle*
export SERVING_BIN=$PWD/core/general-server/serving
cd ../../
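Once SERVING_BIN points at the freshly built binary, the C++ server is started through paddle_serving_server.serve with the custom GeneralDetectionOp, exactly as the test script in this commit does. A minimal sketch, assuming the PP-OCRv3 serving model directories already exist under deploy/pdserving:

# Sketch only: model directory names come from the ch_PP-OCRv3 TIPC config below.
cd deploy/pdserving
python3.7 -m paddle_serving_server.serve \
    --model ppocr_det_v3_serving ppocr_rec_v3_serving \
    --op GeneralDetectionOp GeneralInferOp \
    --port 9293 --gpu_id=0 > serving_log_gpu.log &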
@@ -0,0 +1,19 @@
===========================serving_params===========================
model_name:ch_PP-OCRv2
python:python3.7
trans_model:-m paddle_serving_client.convert
--det_dirname:./inference/ch_PP-OCRv2_det_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--det_serving_server:./deploy/pdserving/ppocr_det_v2_serving/
--det_serving_client:./deploy/pdserving/ppocr_det_v2_client/
--rec_dirname:./inference/ch_PP-OCRv2_rec_infer/
--rec_serving_server:./deploy/pdserving/ppocr_rec_v2_serving/
--rec_serving_client:./deploy/pdserving/ppocr_rec_v2_client/
serving_dir:./deploy/pdserving
web_service:-m paddle_serving_server.serve
--op:GeneralDetectionOp GeneralInferOp
--port:9293
device:gpu
cpp_client:ocr_cpp_client.py
--image_dir:../../doc/imgs
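The trans_model entry above is expanded by the test script into a paddle_serving_client.convert call. A sketch of the detection-model conversion this config drives (the recognition model is converted the same way with the --rec_* values):

# Sketch assembled from the config values above.
python3.7 -m paddle_serving_client.convert \
    --dirname ./inference/ch_PP-OCRv2_det_infer/ \
    --model_filename inference.pdmodel \
    --params_filename inference.pdiparams \
    --serving_server ./deploy/pdserving/ppocr_det_v2_serving/ \
    --serving_client ./deploy/pdserving/ppocr_det_v2_client/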
@@ -0,0 +1,19 @@
===========================serving_params===========================
model_name:ch_PP-OCRv3
python:python3.7
trans_model:-m paddle_serving_client.convert
--det_dirname:./inference/ch_PP-OCRv3_det_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--det_serving_server:./deploy/pdserving/ppocr_det_v3_serving/
--det_serving_client:./deploy/pdserving/ppocr_det_v3_client/
--rec_dirname:./inference/ch_PP-OCRv3_rec_infer/
--rec_serving_server:./deploy/pdserving/ppocr_rec_v3_serving/
--rec_serving_client:./deploy/pdserving/ppocr_rec_v3_client/
serving_dir:./deploy/pdserving
web_service:-m paddle_serving_server.serve
--op:GeneralDetectionOp GeneralInferOp
--port:9293
device:gpu
cpp_client:ocr_cpp_client.py
--image_dir:../../doc/imgs
@@ -0,0 +1,19 @@
===========================serving_params===========================
model_name:ch_ppocr_mobile_v2.0
python:python3.7
trans_model:-m paddle_serving_client.convert
--det_dirname:./inference/ch_ppocr_mobile_v2.0_det_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--det_serving_server:./deploy/pdserving/ppocr_det_mobile_serving/
--det_serving_client:./deploy/pdserving/ppocr_det_mobile_client/
--rec_dirname:./inference/ch_ppocr_mobile_v2.0_rec_infer/
--rec_serving_server:./deploy/pdserving/ppocr_rec_mobile_serving/
--rec_serving_client:./deploy/pdserving/ppocr_rec_mobile_client/
serving_dir:./deploy/pdserving
web_service:-m paddle_serving_server.serve
--op:GeneralDetectionOp GeneralInferOp
--port:9293
device:gpu
cpp_client:ocr_cpp_client.py
--image_dir:../../doc/imgs
@@ -0,0 +1,19 @@
===========================serving_params===========================
model_name:ch_ppocr_server_v2.0
python:python3.7
trans_model:-m paddle_serving_client.convert
--det_dirname:./inference/ch_ppocr_server_v2.0_det_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--det_serving_server:./deploy/pdserving/ppocr_det_server_serving/
--det_serving_client:./deploy/pdserving/ppocr_det_server_client/
--rec_dirname:./inference/ch_ppocr_server_v2.0_rec_infer/
--rec_serving_server:./deploy/pdserving/ppocr_rec_server_serving/
--rec_serving_client:./deploy/pdserving/ppocr_rec_server_client/
serving_dir:./deploy/pdserving
web_service:-m paddle_serving_server.serve
--op:GeneralDetectionOp GeneralInferOp
--port:9293
device:gpu
cpp_client:ocr_cpp_client.py
--image_dir:../../doc/imgs
@@ -0,0 +1,184 @@
#!/bin/bash
source test_tipc/common_func.sh

function func_parser_model_config(){
    strs=$1
    IFS="/"
    array=(${strs})
    tmp=${array[-1]}
    echo ${tmp}
}

FILENAME=$1
dataline=$(awk 'NR==1, NR==19{print}' $FILENAME)

# parse params
IFS=$'\n'
lines=(${dataline})

# parse serving params
model_name=$(func_parser_value "${lines[1]}")
python_list=$(func_parser_value "${lines[2]}")
trans_model_py=$(func_parser_value "${lines[3]}")
det_infer_model_dir_key=$(func_parser_key "${lines[4]}")
det_infer_model_dir_value=$(func_parser_value "${lines[4]}")
model_filename_key=$(func_parser_key "${lines[5]}")
model_filename_value=$(func_parser_value "${lines[5]}")
params_filename_key=$(func_parser_key "${lines[6]}")
params_filename_value=$(func_parser_value "${lines[6]}")
det_serving_server_key=$(func_parser_key "${lines[7]}")
det_serving_server_value=$(func_parser_value "${lines[7]}")
det_serving_client_key=$(func_parser_key "${lines[8]}")
det_serving_client_value=$(func_parser_value "${lines[8]}")
rec_infer_model_dir_key=$(func_parser_key "${lines[9]}")
rec_infer_model_dir_value=$(func_parser_value "${lines[9]}")
rec_serving_server_key=$(func_parser_key "${lines[10]}")
rec_serving_server_value=$(func_parser_value "${lines[10]}")
rec_serving_client_key=$(func_parser_key "${lines[11]}")
rec_serving_client_value=$(func_parser_value "${lines[11]}")
det_server_value=$(func_parser_model_config "${lines[7]}")
det_client_value=$(func_parser_model_config "${lines[8]}")
rec_server_value=$(func_parser_model_config "${lines[10]}")
rec_client_value=$(func_parser_model_config "${lines[11]}")
serving_dir_value=$(func_parser_value "${lines[12]}")
web_service_py=$(func_parser_value "${lines[13]}")
op_key=$(func_parser_key "${lines[14]}")
op_value=$(func_parser_value "${lines[14]}")
port_key=$(func_parser_key "${lines[15]}")
port_value=$(func_parser_value "${lines[15]}")
device_value=$(func_parser_value "${lines[16]}")
cpp_client_py=$(func_parser_value "${lines[17]}")
image_dir_key=$(func_parser_key "${lines[18]}")
image_dir_value=$(func_parser_value "${lines[18]}")

LOG_PATH="$(pwd)/test_tipc/output/${model_name}/cpp_serving"
mkdir -p ${LOG_PATH}
status_log="${LOG_PATH}/results_cpp_serving.log"

function func_serving(){
    IFS='|'
    _python=$1
    _script=$2
    _model_dir=$3
    # pdserving
    set_model_filename=$(func_set_params "${model_filename_key}" "${model_filename_value}")
    set_params_filename=$(func_set_params "${params_filename_key}" "${params_filename_value}")
    if [ ${model_name} = "ch_PP-OCRv2" ] || [ ${model_name} = "ch_PP-OCRv3" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0" ] || [ ${model_name} = "ch_ppocr_server_v2.0" ]; then
        # trans det
        set_dirname=$(func_set_params "--dirname" "${det_infer_model_dir_value}")
        set_serving_server=$(func_set_params "--serving_server" "${det_serving_server_value}")
        set_serving_client=$(func_set_params "--serving_client" "${det_serving_client_value}")
        python_list=(${python_list})
        trans_model_cmd="${python_list[0]} ${trans_model_py} ${set_dirname} ${set_model_filename} ${set_params_filename} ${set_serving_server} ${set_serving_client}"
        eval $trans_model_cmd
        cp "deploy/pdserving/serving_client_conf.prototxt" ${det_serving_client_value}
        # trans rec
        set_dirname=$(func_set_params "--dirname" "${rec_infer_model_dir_value}")
        set_serving_server=$(func_set_params "--serving_server" "${rec_serving_server_value}")
        set_serving_client=$(func_set_params "--serving_client" "${rec_serving_client_value}")
        python_list=(${python_list})
        trans_model_cmd="${python_list[0]} ${trans_model_py} ${set_dirname} ${set_model_filename} ${set_params_filename} ${set_serving_server} ${set_serving_client}"
        eval $trans_model_cmd
    elif [ ${model_name} = "ch_PP-OCRv2_det" ] || [ ${model_name} = "ch_PP-OCRv3_det" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0_det" ] || [ ${model_name} = "ch_ppocr_server_v2.0_det" ]; then
        # trans det
        set_dirname=$(func_set_params "--dirname" "${det_infer_model_dir_value}")
        set_serving_server=$(func_set_params "--serving_server" "${det_serving_server_value}")
        set_serving_client=$(func_set_params "--serving_client" "${det_serving_client_value}")
        python_list=(${python_list})
        trans_model_cmd="${python_list[0]} ${trans_model_py} ${set_dirname} ${set_model_filename} ${set_params_filename} ${set_serving_server} ${set_serving_client}"
        eval $trans_model_cmd
        cp "deploy/pdserving/serving_client_conf.prototxt" ${det_serving_client_value}
    elif [ ${model_name} = "ch_PP-OCRv2_rec" ] || [ ${model_name} = "ch_PP-OCRv3_rec" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0_rec" ] || [ ${model_name} = "ch_ppocr_server_v2.0_rec" ]; then
        # trans rec
        set_dirname=$(func_set_params "--dirname" "${rec_infer_model_dir_value}")
        set_serving_server=$(func_set_params "--serving_server" "${rec_serving_server_value}")
        set_serving_client=$(func_set_params "--serving_client" "${rec_serving_client_value}")
        python_list=(${python_list})
        trans_model_cmd="${python_list[0]} ${trans_model_py} ${set_dirname} ${set_model_filename} ${set_params_filename} ${set_serving_server} ${set_serving_client}"
        eval $trans_model_cmd
    fi
    last_status=${PIPESTATUS[0]}
    status_check $last_status "${trans_model_cmd}" "${status_log}" "${model_name}"
    set_image_dir=$(func_set_params "${image_dir_key}" "${image_dir_value}")
    python_list=(${python_list})

    cd ${serving_dir_value}
    export SERVING_BIN=/paddle/kaitao/tipc/PaddleOCR-cppinfer/deploy/pdserving/Serving/build_server/core/general-server/serving
    # cpp serving
    unset https_proxy
    unset http_proxy
    for device in ${device_value[*]}; do
        if [ ${device} = "cpu" ]; then
            if [ ${model_name} = "ch_PP-OCRv2" ] || [ ${model_name} = "ch_PP-OCRv3" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0" ] || [ ${model_name} = "ch_ppocr_server_v2.0" ]; then
                web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${det_server_value} ${rec_server_value} ${op_key} ${op_value} ${port_key} ${port_value} > serving_log_cpu.log &"
            elif [ ${model_name} = "ch_PP-OCRv2_det" ] || [ ${model_name} = "ch_PP-OCRv3_det" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0_det" ] || [ ${model_name} = "ch_ppocr_server_v2.0_det" ]; then
                web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${det_server_value} ${op_key} ${op_value} ${port_key} ${port_value} > serving_log_cpu.log &"
            elif [ ${model_name} = "ch_PP-OCRv2_rec" ] || [ ${model_name} = "ch_PP-OCRv3_rec" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0_rec" ] || [ ${model_name} = "ch_ppocr_server_v2.0_rec" ]; then
                web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${rec_server_value} ${op_key} ${op_value} ${port_key} ${port_value} > serving_log_cpu.log &"
            fi
            eval $web_service_cpp_cmd
            last_status=${PIPESTATUS[0]}
            status_check $last_status "${web_service_cpp_cmd}" "${status_log}" "${model_name}"
            sleep 5s
            _save_log_path="${LOG_PATH}/server_infer_cpp_cpu.log"
            if [ ${model_name} = "ch_PP-OCRv2" ] || [ ${model_name} = "ch_PP-OCRv3" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0" ] || [ ${model_name} = "ch_ppocr_server_v2.0" ]; then
                cpp_client_cmd="${python_list[0]} ${cpp_client_py} ${det_client_value} ${rec_client_value} > ${_save_log_path} 2>&1"
            elif [ ${model_name} = "ch_PP-OCRv2_det" ] || [ ${model_name} = "ch_PP-OCRv3_det" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0_det" ] || [ ${model_name} = "ch_ppocr_server_v2.0_det" ]; then
                cpp_client_cmd="${python_list[0]} ${cpp_client_py} ${det_client_value} > ${_save_log_path} 2>&1"
            elif [ ${model_name} = "ch_PP-OCRv2_rec" ] || [ ${model_name} = "ch_PP-OCRv3_rec" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0_rec" ] || [ ${model_name} = "ch_ppocr_server_v2.0_rec" ]; then
                cpp_client_cmd="${python_list[0]} ${cpp_client_py} ${rec_client_value} > ${_save_log_path} 2>&1"
            fi
            eval $cpp_client_cmd
            last_status=${PIPESTATUS[0]}
            status_check $last_status "${cpp_client_cmd}" "${status_log}" "${model_name}"
            sleep 5s
            ps ux | grep -i ${port_value} | awk '{print $2}' | xargs kill -s 9
            ps ux | grep -i ${web_service_py} | awk '{print $2}' | xargs kill -s 9
        elif [ ${device} = "gpu" ]; then
            if [ ${model_name} = "ch_PP-OCRv2" ] || [ ${model_name} = "ch_PP-OCRv3" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0" ] || [ ${model_name} = "ch_ppocr_server_v2.0" ]; then
                web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${det_server_value} ${rec_server_value} ${op_key} ${op_value} ${port_key} ${port_value} --gpu_id=0 > serving_log_gpu.log &"
            elif [ ${model_name} = "ch_PP-OCRv2_det" ] || [ ${model_name} = "ch_PP-OCRv3_det" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0_det" ] || [ ${model_name} = "ch_ppocr_server_v2.0_det" ]; then
                web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${det_server_value} ${op_key} ${op_value} ${port_key} ${port_value} --gpu_id=0 > serving_log_gpu.log &"
            elif [ ${model_name} = "ch_PP-OCRv2_rec" ] || [ ${model_name} = "ch_PP-OCRv3_rec" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0_rec" ] || [ ${model_name} = "ch_ppocr_server_v2.0_rec" ]; then
                web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${rec_server_value} ${op_key} ${op_value} ${port_key} ${port_value} --gpu_id=0 > serving_log_gpu.log &"
            fi
            eval $web_service_cpp_cmd
            sleep 5s
            _save_log_path="${LOG_PATH}/server_infer_cpp_gpu.log"
            if [ ${model_name} = "ch_PP-OCRv2" ] || [ ${model_name} = "ch_PP-OCRv3" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0" ] || [ ${model_name} = "ch_ppocr_server_v2.0" ]; then
                cpp_client_cmd="${python_list[0]} ${cpp_client_py} ${det_client_value} ${rec_client_value} > ${_save_log_path} 2>&1"
            elif [ ${model_name} = "ch_PP-OCRv2_det" ] || [ ${model_name} = "ch_PP-OCRv3_det" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0_det" ] || [ ${model_name} = "ch_ppocr_server_v2.0_det" ]; then
                cpp_client_cmd="${python_list[0]} ${cpp_client_py} ${det_client_value} > ${_save_log_path} 2>&1"
            elif [ ${model_name} = "ch_PP-OCRv2_rec" ] || [ ${model_name} = "ch_PP-OCRv3_rec" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0_rec" ] || [ ${model_name} = "ch_ppocr_server_v2.0_rec" ]; then
                cpp_client_cmd="${python_list[0]} ${cpp_client_py} ${rec_client_value} > ${_save_log_path} 2>&1"
            fi
            eval $cpp_client_cmd
            last_status=${PIPESTATUS[0]}
            eval "cat ${_save_log_path}"
            status_check $last_status "${cpp_client_cmd}" "${status_log}" "${model_name}"
            sleep 5s
            ps ux | grep -i ${port_value} | awk '{print $2}' | xargs kill -s 9
            ps ux | grep -i ${web_service_py} | awk '{print $2}' | xargs kill -s 9
        else
            echo "Does not support hardware other than CPU and GPU currently!"
        fi
    done
}


# set cuda device
GPUID=$2
if [ ${#GPUID} -le 0 ];then
    env="export CUDA_VISIBLE_DEVICES=0"
else
    env="export CUDA_VISIBLE_DEVICES=${GPUID}"
fi
eval $env
echo $env


echo "################### run test ###################"

export Count=0
IFS="|"
func_serving "${web_service_cpp_cmd}"
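The script takes the TIPC config file as its first argument and an optional GPU id as its second; with no GPU id it defaults to CUDA_VISIBLE_DEVICES=0. A typical invocation is sketched below; the config path is a placeholder, substitute the serving_cpp config that matches the model under test:

# Placeholder path; use the actual serving_cpp config file added in this commit.
bash test_tipc/test_serving_infer_cpp.sh test_tipc/configs/<model_name>/<serving_cpp_config>.txt 0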