HydrogenSulfate 2022-06-13 14:37:30 +08:00
parent cb93e20dee
commit d1e5bea2cb
8 changed files with 108 additions and 181 deletions

@@ -22,181 +22,102 @@ import faiss
import os
import pickle


class MainbodyDetect():
    """
    PP-ShiTu mainbody detection.

    Wraps preprocess, process (Serving inference) and postprocess, and
    returns the detection results.
    Note: postprocess applies a top-k limit and a confidence filter, but
    no NMS.
    """

    def __init__(self):
        self.preprocess = DetectionSequential([
            DetectionFile2Image(), DetectionNormalize(
                [0.485, 0.456, 0.406], [0.229, 0.224, 0.225], True),
            DetectionResize(
                (640, 640), False, interpolation=2), DetectionTranspose(
                    (2, 0, 1))
        ])

        self.client = Client()
        self.client.load_client_config(
            "../../models/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_client/serving_client_conf.prototxt"
        )
        self.client.connect(['127.0.0.1:9293'])

        self.max_det_result = 5
        self.conf_threshold = 0.2

    def predict(self, imgpath):
        im, im_info = self.preprocess(imgpath)
        im_shape = np.array(im.shape[1:]).reshape(-1)
        scale_factor = np.array(list(im_info['scale_factor'])).reshape(-1)

        fetch_map = self.client.predict(
            feed={
                "image": im,
                "im_shape": im_shape,
                "scale_factor": scale_factor,
            },
            fetch=["save_infer_model/scale_0.tmp_1"],
            batch=False)
        return self.postprocess(fetch_map, imgpath)

    def postprocess(self, fetch_map, imgpath):
        # 1. keep at most the top max_det_result boxes
        det_results = fetch_map["save_infer_model/scale_0.tmp_1"]
        if len(det_results) > self.max_det_result:
            boxes_reserved = det_results[:self.max_det_result]
        else:
            boxes_reserved = det_results

        # 2. filter boxes by confidence threshold
        boxes_list = []
        for i in range(boxes_reserved.shape[0]):
            if boxes_reserved[i, 1] > self.conf_threshold:
                boxes_list.append(boxes_reserved[i, :])

        # 3. append the whole image as an extra box, so recognition also
        #    runs on the uncropped image
        origin_img = cv2.imread(imgpath)
        boxes_list.append(
            np.array([0, 1.0, 0, 0, origin_img.shape[1], origin_img.shape[0]]))
        return np.array(boxes_list)
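

# Each row produced by MainbodyDetect.postprocess has 6 columns, following
# PaddleDetection's bbox output layout:
#   [class_id, score, x1, y1, x2, y2]
# (the appended whole-image box uses class_id 0 and score 1.0);
# ObjectRecognition.predict below reads box[2:] as the crop coordinates.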


class ObjectRecognition():
    """
    PP-ShiTu object recognition for all objects detected by MainbodyDetect.

    Wraps preprocess, process (Serving inference) and postprocess:
    preprocess crops each detected box and batches the crops, process
    extracts one feature vector per crop, and postprocess runs gallery
    retrieval followed by NMS.
    """

    def __init__(self):
        self.client = Client()
        self.client.load_client_config(
            "../../models/general_PPLCNet_x2_5_lite_v1.0_client/serving_client_conf.prototxt"
        )
        self.client.connect(["127.0.0.1:9294"])

        self.seq = Sequential([
            BGR2RGB(), Resize((224, 224)), Div(255),
            Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225],
                      False), Transpose((2, 0, 1))
        ])

        self.searcher, self.id_map = self.init_index()

        self.rec_nms_thresold = 0.05
        self.rec_score_thres = 0.5
        self.feature_normalize = True
        self.return_k = 1

    def init_index(self):
        index_dir = "../../drink_dataset_v1.0/index"
        assert os.path.exists(os.path.join(
            index_dir, "vector.index")), "vector.index not found ..."
        assert os.path.exists(os.path.join(
            index_dir, "id_map.pkl")), "id_map.pkl not found ... "

        searcher = faiss.read_index(os.path.join(index_dir, "vector.index"))
        with open(os.path.join(index_dir, "id_map.pkl"), "rb") as fd:
            id_map = pickle.load(fd)
        return searcher, id_map
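
    # NOTE: vector.index is a prebuilt FAISS index over the gallery
    # features, and id_map.pkl maps FAISS row ids to strings whose second
    # whitespace-separated field is used as the recognized label below.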

    def predict(self, det_boxes, imgpath):
        # 1. preprocess: crop each detected box and build a batch
        batch_imgs = []
        origin_img = cv2.imread(imgpath)
        for i in range(det_boxes.shape[0]):
            box = det_boxes[i]
            x1, y1, x2, y2 = [int(x) for x in box[2:]]
            cropped_img = origin_img[y1:y2, x1:x2, :].copy()
            tmp = self.seq(cropped_img)
            batch_imgs.append(tmp)
        batch_imgs = np.array(batch_imgs)

        # 2. process: extract one feature vector per crop
        fetch_map = self.client.predict(
            feed={"x": batch_imgs}, fetch=["features"], batch=True)
        batch_features = fetch_map["features"]

        # 3. postprocess: optionally L2-normalize features, retrieve the
        #    top-k gallery entries, then filter by retrieval score
        if self.feature_normalize:
            feas_norm = np.sqrt(
                np.sum(np.square(batch_features), axis=1, keepdims=True))
            batch_features = np.divide(batch_features, feas_norm)

        scores, docs = self.searcher.search(batch_features, self.return_k)

        results = []
        for i in range(scores.shape[0]):
            pred = {}
            if scores[i][0] >= self.rec_score_thres:
                pred["bbox"] = [int(x) for x in det_boxes[i, 2:]]
                pred["rec_docs"] = self.id_map[docs[i][0]].split()[1]
                pred["rec_scores"] = scores[i][0]
                results.append(pred)
        return self.nms_to_rec_results(results)

    def nms_to_rec_results(self, results):
        # greedy IoU-based NMS over the recognized boxes, keeping the
        # highest-scoring box in each overlapping group
        filtered_results = []
        x1 = np.array([r["bbox"][0] for r in results]).astype("float32")
        y1 = np.array([r["bbox"][1] for r in results]).astype("float32")
        x2 = np.array([r["bbox"][2] for r in results]).astype("float32")
        y2 = np.array([r["bbox"][3] for r in results]).astype("float32")
        scores = np.array([r["rec_scores"] for r in results])

        areas = (x2 - x1 + 1) * (y2 - y1 + 1)
        order = scores.argsort()[::-1]
        while order.size > 0:
            i = order[0]
            xx1 = np.maximum(x1[i], x1[order[1:]])
            yy1 = np.maximum(y1[i], y1[order[1:]])
            xx2 = np.minimum(x2[i], x2[order[1:]])
            yy2 = np.minimum(y2[i], y2[order[1:]])
            w = np.maximum(0.0, xx2 - xx1 + 1)
            h = np.maximum(0.0, yy2 - yy1 + 1)
            inter = w * h
            ovr = inter / (areas[i] + areas[order[1:]] - inter)
            inds = np.where(ovr <= self.rec_nms_thresold)[0]
            order = order[inds + 1]
            filtered_results.append(results[i])
        return filtered_results
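
# Each dict returned by ObjectRecognition.predict has the form
#   {"bbox": [x1, y1, x2, y2], "rec_docs": <gallery label>,
#    "rec_scores": <retrieval score>}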


if __name__ == "__main__":
    det = MainbodyDetect()
    rec = ObjectRecognition()

    # 1. detect mainbody boxes
    imgpath = "../../drink_dataset_v1.0/test_images/001.jpeg"
    det_results = det.predict(imgpath)

    # 2. recognize each detected box against the gallery
    rec_results = rec.predict(det_results, imgpath)
    print(rec_results)
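
For reference, `init_index` above only loads artifacts that already exist on disk. The sketch below shows one way a compatible `vector.index` / `id_map.pkl` pair could be built offline; it is an illustration, not PaddleClas's actual gallery-building tool, and the 512-dim features, random data, and flat inner-product index are all assumptions.

```python
import pickle

import faiss
import numpy as np

# Stand-in gallery features: in practice these would come from the same
# recognition model served above (dimension assumed to be 512 here).
gallery_feats = np.random.rand(10, 512).astype("float32")
gallery_feats /= np.linalg.norm(gallery_feats, axis=1, keepdims=True)

# Inner-product index; with L2-normalized vectors the retrieval score is
# the cosine similarity.
index = faiss.IndexFlatIP(gallery_feats.shape[1])
index.add(gallery_feats)
faiss.write_index(index, "vector.index")

# id_map entries are "label docname" strings, matching the
# id_map[...].split()[1] lookup in ObjectRecognition.predict.
id_map = {i: "%d drink_%d" % (i, i) for i in range(gallery_feats.shape[0])}
with open("id_map.pkl", "wb") as fd:
    pickle.dump(id_map, fd)
```

The real `drink_dataset_v1.0` index may use a different FAISS index type; the only contract `predict` relies on is `search` returning `(scores, ids)` plus an id-to-label mapping.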

@@ -52,7 +52,7 @@ The main program for the Linux GPU/CPU PYTHON service deployment test is `test_serving_infer.sh
```
- Install the PaddleServing components, including serving_client and serving-app, automatically compile the serving_server package with custom OPs (when testing PP-ShiTu), and automatically download and extract the inference models
```bash
bash test_tipc/prepare.sh test_tipc/configs/PPLCNet/PPLCNet_x1_0_linux_gpu_normal_normal_serving_cpp_linux_gpu_cpu.txt serving_infer
```
### 2.3 Functional testing
@@ -63,24 +63,28 @@ The main program for the Linux GPU/CPU PYTHON service deployment test is `test_serving_infer.sh
bash test_tipc/test_serving_infer.sh ${your_params_file}
```
Take the `Linux GPU/CPU C++ service deployment test` of `PPLCNet_x1_0` as an example; the command is as follows.
```bash
bash test_tipc/test_serving_infer.sh test_tipc/configs/PPLCNet/PPLCNet_x1_0_linux_gpu_normal_normal_serving_cpp_linux_gpu_cpu.txt
```
Output like the following indicates that the command ran successfully.
```
Run successfully with command - PPLCNet_x1_0 - python3.7 test_cpp_serving_client.py > ../../test_tipc/output/PPLCNet_x1_0/server_infer_cpp_gpu_pipeline_batchsize_1.log 2>&1 !
Run successfully with command - PPLCNet_x1_0 - python3.7 test_cpp_serving_client.py > ../../test_tipc/output/PPLCNet_x1_0/server_infer_cpp_cpu_pipeline_batchsize_1.log 2>&1 !
```
The prediction results are saved automatically to `./test_tipc/output/PPLCNet_x1_0/server_infer_cpp_gpu_pipeline_batchsize_1.log`, where the PaddleServing output can be inspected:
```
WARNING: Logging before InitGoogleLogging() is written to STDERR
I0612 09:55:16.109890 38303 naming_service_thread.cpp:202] brpc::policy::ListNamingService("127.0.0.1:9292"): added 1
I0612 09:55:16.172924 38303 general_model.cpp:490] [client]logid=0,client_cost=60.772ms,server_cost=57.6ms.
prediction: daisy, probability: 0.9099399447441101
0.06275796890258789
```

@@ -204,7 +204,9 @@ if [[ ${MODE} = "serving_infer" ]]; then
${python_name} -m pip install paddle-serving-app==0.9.0 -i https://pypi.tuna.tsinghua.edu.cn/simple
python_name=$(func_parser_value "${lines[2]}")
if [[ ${FILENAME} =~ "cpp" ] && [ ${model_name} =~ "ShiTu" ]]; then
pushd ./deploy/paddleserving
bash build_server.sh ${python_name}
popd
else
${python_name} -m pip install install paddle-serving-server-gpu==0.9.0.post101 -i https://pypi.tuna.tsinghua.edu.cn/simple
fi

@@ -263,13 +263,13 @@ function func_serving_rec(){
det_trans_model_cmd="${python_interp} ${trans_model_py} ${set_dirname} ${set_model_filename} ${set_params_filename} ${set_serving_server} ${set_serving_client}"
eval $det_trans_model_cmd
cp_prototxt_cmd="cp ./paddleserving/preprocess/general_PPLCNet_x2_5_lite_v1.0_serving/*.prototxt ${cls_serving_server_value}"
cp_prototxt_cmd="cp ./paddleserving/recognition/preprocess/general_PPLCNet_x2_5_lite_v1.0_serving/*.prototxt ${cls_serving_server_value}"
eval ${cp_prototxt_cmd}
cp_prototxt_cmd="cp ./paddleserving/preprocess/general_PPLCNet_x2_5_lite_v1.0_client/*.prototxt ${cls_serving_client_value}"
cp_prototxt_cmd="cp ./paddleserving/recognition/preprocess/general_PPLCNet_x2_5_lite_v1.0_client/*.prototxt ${cls_serving_client_value}"
eval ${cp_prototxt_cmd}
cp_prototxt_cmd="cp ./paddleserving/preprocess/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_client/*.prototxt ${det_serving_client_value}"
cp_prototxt_cmd="cp ./paddleserving/recognition/preprocess/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_client/*.prototxt ${det_serving_client_value}"
eval ${cp_prototxt_cmd}
cp_prototxt_cmd="cp ./paddleserving/preprocess/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_serving/*.prototxt ${det_serving_server_value}"
cp_prototxt_cmd="cp ./paddleserving/recognition/preprocess/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_serving/*.prototxt ${det_serving_server_value}"
eval ${cp_prototxt_cmd}
prototxt_dataline=$(awk 'NR==1, NR==3{print}' ${cls_serving_server_value}/serving_server_conf.prototxt)