From 1a9d6229fd39e81aad4b01344234be3e74be48d1 Mon Sep 17 00:00:00 2001 From: gaotingquan <gaotingquan@baidu.com> Date: Sun, 13 Mar 2022 08:42:16 +0000 Subject: [PATCH 1/3] support clas, rec, search --- .../ppshitu_v2/configs/test_cls_config.yaml | 38 +++++++++++ .../ppshitu_v2/configs/test_det_config.yaml | 33 +++++++++ .../ppshitu_v2/configs/test_rec_config.yaml | 34 ++++++++++ .../configs/test_search_config.yaml | 16 +++++ deploy/python/ppshitu_v2/examples/predict.py | 12 +++- .../python/ppshitu_v2/examples/test_search.py | 31 +++++++++ .../ppshitu_v2/processor/algo_mod/__init__.py | 35 +++++++--- .../algo_mod/postprocessor/__init__.py | 11 +-- .../algo_mod/postprocessor/classification.py | 68 +++++++++++++++++++ .../processor/algo_mod/postprocessor/rec.py | 16 +++++ .../processor/algo_mod/predictor/__init__.py | 9 ++- .../algo_mod/predictor/paddle_predictor.py | 23 +++++-- .../algo_mod/preprocessor/__init__.py | 9 ++- .../processor/algo_mod/searcher/__init__.py | 23 ++++++- 14 files changed, 323 insertions(+), 35 deletions(-) create mode 100644 deploy/python/ppshitu_v2/configs/test_cls_config.yaml create mode 100644 deploy/python/ppshitu_v2/configs/test_det_config.yaml create mode 100644 deploy/python/ppshitu_v2/configs/test_rec_config.yaml create mode 100644 deploy/python/ppshitu_v2/configs/test_search_config.yaml create mode 100644 deploy/python/ppshitu_v2/examples/test_search.py create mode 100644 deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py create mode 100644 deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/rec.py diff --git a/deploy/python/ppshitu_v2/configs/test_cls_config.yaml b/deploy/python/ppshitu_v2/configs/test_cls_config.yaml new file mode 100644 index 000000000..2b139a849 --- /dev/null +++ b/deploy/python/ppshitu_v2/configs/test_cls_config.yaml @@ -0,0 +1,38 @@ +Global: + Engine: POPEngine + infer_imgs: "../../images/wangzai.jpg" + + +Modules: + - name: + type: AlgoMod + processors: + - name: ImageProcessor + type: preprocessor + ops: + - ResizeImage: + resize_short: 256 + - CropImage: + size: 224 + - NormalizeImage: + scale: 0.00392157 + mean: [0.485, 0.456, 0.406] + std: [0.229, 0.224, 0.225] + order: hwc + - ToCHWImage: + - GetShapeInfo: + configs: + order: chw + - ToBatch: + - name: PaddlePredictor + type: predictor + inference_model_dir: "./MobileNetV2_infer" + input_names: + inputs: image + output_names: + save_infer_model/scale_0.tmp_1: logits + - name: TopK + type: postprocessor + k: 10 + class_id_map_file: "../ppcls/utils/imagenet1k_label_list.txt" + save_dir: None \ No newline at end of file diff --git a/deploy/python/ppshitu_v2/configs/test_det_config.yaml b/deploy/python/ppshitu_v2/configs/test_det_config.yaml new file mode 100644 index 000000000..64a421fae --- /dev/null +++ b/deploy/python/ppshitu_v2/configs/test_det_config.yaml @@ -0,0 +1,33 @@ +Global: + Engine: POPEngine + infer_imgs: "../../images/wangzai.jpg" + +Modules: + - name: + type: AlgoMod + processors: + - name: ImageProcessor + type: preprocessor + ops: + - ResizeImage: + size: [640, 640] + interpolation: 2 + - NormalizeImage: + scale: 0.00392157 + mean: [0.485, 0.456, 0.406] + std: [0.229, 0.224, 0.225] + order: hwc + - ToCHWImage: + - GetShapeInfo: + configs: + order: chw + - ToBatch: + - name: PaddlePredictor + type: predictor + inference_model_dir: ./models/ppyolov2_r50vd_dcn_mainbody_v1.0_infer/ + - name: DetPostPro + type: postprocessor + threshold: 0.2 + max_det_results: 1 + label_list: + - foreground \ No newline at end of file diff --git 
a/deploy/python/ppshitu_v2/configs/test_rec_config.yaml b/deploy/python/ppshitu_v2/configs/test_rec_config.yaml new file mode 100644 index 000000000..7ce8e4681 --- /dev/null +++ b/deploy/python/ppshitu_v2/configs/test_rec_config.yaml @@ -0,0 +1,34 @@ +Global: + Engine: POPEngine + infer_imgs: "../../images/wangzai.jpg" + +Modules: + - name: + type: AlgoMod + processors: + - name: ImageProcessor + type: preprocessor + ops: + - ResizeImage: + resize_short: 256 + - CropImage: + size: 224 + - NormalizeImage: + scale: 0.00392157 + mean: [0.485, 0.456, 0.406] + std: [0.229, 0.224, 0.225] + order: hwc + - ToCHWImage: + - GetShapeInfo: + configs: + order: chw + - ToBatch: + - name: PaddlePredictor + type: predictor + inference_model_dir: models/product_ResNet50_vd_aliproduct_v1.0_infer + input_names: + x: image + output_names: + save_infer_model/scale_0.tmp_1: features + - name: FeatureNormalizer + type: postprocessor \ No newline at end of file diff --git a/deploy/python/ppshitu_v2/configs/test_search_config.yaml b/deploy/python/ppshitu_v2/configs/test_search_config.yaml new file mode 100644 index 000000000..eaf8486ab --- /dev/null +++ b/deploy/python/ppshitu_v2/configs/test_search_config.yaml @@ -0,0 +1,16 @@ +Global: + Engine: POPEngine + infer_imgs: "./vector.npy" + +Modules: + - name: + type: AlgoMod + processors: + - name: Searcher + type: searcher + index_dir: "./index" + dist_type: "IP" + embedding_size: 512 + batch_size: 32 + return_k: 5 + score_thres: 0.5 \ No newline at end of file diff --git a/deploy/python/ppshitu_v2/examples/predict.py b/deploy/python/ppshitu_v2/examples/predict.py index 0379808dc..3790ec988 100644 --- a/deploy/python/ppshitu_v2/examples/predict.py +++ b/deploy/python/ppshitu_v2/examples/predict.py @@ -18,8 +18,16 @@ def main(): image_file = "../../images/wangzai.jpg" img = cv2.imread(image_file)[:, :, ::-1] input_data = {"input_image": img} - output = engine.process(input_data) - print(output) + data = engine.process(input_data) + + # for det, cls + # print(data) + + # for rec + # features = data["pred"]["features"] + # print(features) + # print(features.shape) + # print(type(features)) if __name__ == '__main__': diff --git a/deploy/python/ppshitu_v2/examples/test_search.py b/deploy/python/ppshitu_v2/examples/test_search.py new file mode 100644 index 000000000..11b36df73 --- /dev/null +++ b/deploy/python/ppshitu_v2/examples/test_search.py @@ -0,0 +1,31 @@ +import os +import sys +__dir__ = os.path.dirname(os.path.abspath(__file__)) +sys.path.append(os.path.abspath(os.path.join(__dir__, '../'))) + +import cv2 + +from engine import build_engine +from utils import config +from utils.get_image_list import get_image_list + +import numpy as np + + +def load_vector(path): + return np.load(path) + + +def main(): + args = config.parse_args() + config_dict = config.get_config( + args.config, overrides=args.override, show=False) + config_dict.profiler_options = args.profiler_options + engine = build_engine(config_dict) + vector = load_vector(config_dict["Global"]["infer_imgs"]) + output = engine.process({"features": vector}) + print(output["search_res"]) + + +if __name__ == '__main__': + main() diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/__init__.py b/deploy/python/ppshitu_v2/processor/algo_mod/__init__.py index c4672b9ee..d5b09fe6f 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/__init__.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/__init__.py @@ -1,6 +1,13 @@ -from .postprocessor import build_postprocessor -from .preprocessor import 
build_preprocessor -from .predictor import build_predictor +# from .postprocessor import build_postprocessor +# from .preprocessor import build_preprocessor +# from .predictor import build_predictor + +import importlib + +from processor.algo_mod import preprocessor +from processor.algo_mod import predictor +from processor.algo_mod import postprocessor +from processor.algo_mod import searcher from ..base_processor import BaseProcessor @@ -10,14 +17,20 @@ class AlgoMod(BaseProcessor): self.processors = [] for processor_config in config["processors"]: processor_type = processor_config.get("type") - if processor_type == "preprocessor": - processor = build_preprocessor(processor_config) - elif processor_type == "predictor": - processor = build_predictor(processor_config) - elif processor_type == "postprocessor": - processor = build_postprocessor(processor_config) - else: - raise NotImplemented("processor type {} unknown.".format(processor_type)) + processor_name = processor_config.get("name") + _mod = importlib.import_module(__name__) + processor = getattr( + getattr(_mod, processor_type), + processor_name)(processor_config) + + # if processor_type == "preprocessor": + # processor = build_preprocessor(processor_config) + # elif processor_type == "predictor": + # processor = build_predictor(processor_config) + # elif processor_type == "postprocessor": + # processor = build_postprocessor(processor_config) + # else: + # raise NotImplemented("processor type {} unknown.".format(processor_type)) self.processors.append(processor) def process(self, input_data): diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/__init__.py b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/__init__.py index 89500a7d4..6567e6d0c 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/__init__.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/__init__.py @@ -1,9 +1,10 @@ import importlib +from .classification import TopK from .det import DetPostPro +from .rec import FeatureNormalizer - -def build_postprocessor(config): - processor_mod = importlib.import_module(__name__) - processor_name = config.get("name") - return getattr(processor_mod, processor_name)(config) +# def build_postprocessor(config): +# processor_mod = importlib.import_module(__name__) +# processor_name = config.get("name") +# return getattr(processor_mod, processor_name)(config) diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py new file mode 100644 index 000000000..6ca945f70 --- /dev/null +++ b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py @@ -0,0 +1,68 @@ +import os + +import numpy as np + +from ...base_processor import BaseProcessor + + +class TopK(BaseProcessor): + def __init__(self, config): + self.topk = config["k"] + assert isinstance(self.topk, (int, )) + + class_id_map_file = config["class_id_map_file"] + self.class_id_map = self.parse_class_id_map(class_id_map_file) + + self.multilabel = config.get("multilabel", False) + + def parse_class_id_map(self, class_id_map_file): + if class_id_map_file is None: + return None + + if not os.path.exists(class_id_map_file): + print( + "Warning: If want to use your own label_dict, please input legal path!\nOtherwise label_names will be empty!" 
+ ) + return None + + try: + class_id_map = {} + with open(class_id_map_file, "r") as fin: + lines = fin.readlines() + for line in lines: + partition = line.split("\n")[0].partition(" ") + class_id_map[int(partition[0])] = str(partition[-1]) + except Exception as ex: + print(ex) + class_id_map = None + return class_id_map + + def process(self, data): + x = data["pred"]["logits"] + # TODO(gaotingquan): support file_name + # if file_names is not None: + # assert x.shape[0] == len(file_names) + y = [] + for idx, probs in enumerate(x): + index = probs.argsort(axis=0)[-self.topk:][::-1].astype( + "int32") if not self.multilabel else np.where( + probs >= 0.5)[0].astype("int32") + clas_id_list = [] + score_list = [] + label_name_list = [] + for i in index: + clas_id_list.append(i.item()) + score_list.append(probs[i].item()) + if self.class_id_map is not None: + label_name_list.append(self.class_id_map[i.item()]) + result = { + "class_ids": clas_id_list, + "scores": np.around( + score_list, decimals=5).tolist(), + } + # if file_names is not None: + # result["file_name"] = file_names[idx] + if label_name_list is not None: + result["label_names"] = label_name_list + y.append(result) + return y diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/rec.py b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/rec.py new file mode 100644 index 000000000..53c197fdb --- /dev/null +++ b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/rec.py @@ -0,0 +1,16 @@ +import numpy as np + +from ...base_processor import BaseProcessor + + +class FeatureNormalizer(BaseProcessor): + def __init__(self, config=None): + pass + + def process(self, data): + batch_output = data["pred"]["features"] + feas_norm = np.sqrt( + np.sum(np.square(batch_output), axis=1, keepdims=True)) + batch_output = np.divide(batch_output, feas_norm) + data["pred"]["features"] = batch_output + return data diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/predictor/__init__.py b/deploy/python/ppshitu_v2/processor/algo_mod/predictor/__init__.py index 2913771b3..4e27b1176 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/predictor/__init__.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/predictor/__init__.py @@ -3,8 +3,7 @@ import importlib from processor.algo_mod.predictor.paddle_predictor import PaddlePredictor from processor.algo_mod.predictor.onnx_predictor import ONNXPredictor - -def build_predictor(config): - processor_mod = importlib.import_module(__name__) - processor_name = config.get("name") - return getattr(processor_mod, processor_name)(config) +# def build_predictor(config): +# processor_mod = importlib.import_module(__name__) +# processor_name = config.get("name") +# return getattr(processor_mod, processor_name)(config) diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py b/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py index b4a248616..d8c61e930 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py @@ -48,17 +48,30 @@ class PaddlePredictor(BaseProcessor): paddle_config.switch_use_feed_fetch_ops(False) self.predictor = create_predictor(paddle_config) - def process(self, input_data): + if "input_names" in config and config["input_names"]: + self.input_name_mapping = config["input_names"] + else: + self.input_name_mapping = [] + if "output_names" in config and config["output_names"]: + self.output_name_mapping = 
config["output_names"] + else: + self.output_name_mapping = [] + + def process(self, data): input_names = self.predictor.get_input_names() for input_name in input_names: input_tensor = self.predictor.get_input_handle(input_name) - input_tensor.copy_from_cpu(input_data[input_name]) + name = self.input_name_mapping[ + input_name] if input_name in self.input_name_mapping else input_name + input_tensor.copy_from_cpu(data[name]) self.predictor.run() output_data = {} output_names = self.predictor.get_output_names() for output_name in output_names: output = self.predictor.get_output_handle(output_name) - output_data[output_name] = output.copy_to_cpu() - input_data["pred"] = output_data - return input_data + name = self.output_name_mapping[ + output_name] if output_name in self.output_name_mapping else output_name + output_data[name] = output.copy_to_cpu() + data["pred"] = output_data + return data diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/preprocessor/__init__.py b/deploy/python/ppshitu_v2/processor/algo_mod/preprocessor/__init__.py index ffc9efde8..94b889f43 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/preprocessor/__init__.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/preprocessor/__init__.py @@ -2,8 +2,7 @@ import importlib from processor.algo_mod.preprocessor.image_processor import ImageProcessor - -def build_preprocessor(config): - processor_mod = importlib.import_module(__name__) - processor_name = config.get("name") - return getattr(processor_mod, processor_name)(config) +# def build_preprocessor(config): +# processor_mod = importlib.import_module(__name__) +# processor_name = config.get("name") +# return getattr(processor_mod, processor_name)(config) diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/searcher/__init__.py b/deploy/python/ppshitu_v2/processor/algo_mod/searcher/__init__.py index 28849db7a..f986738cf 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/searcher/__init__.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/searcher/__init__.py @@ -1,4 +1,23 @@ +import os +import pickle + +import faiss -def build_searcher(config): - pass +class Searcher: + def __init__(self, config): + super().__init__() + + self.Searcher = faiss.read_index( + os.path.join(config["index_dir"], "vector.index")) + + with open(os.path.join(config["index_dir"], "id_map.pkl"), "rb") as fd: + self.id_map = pickle.load(fd) + + self.return_k = config["return_k"] + + def process(self, data): + features = data["features"] + scores, docs = self.Searcher.search(features, self.return_k) + data["search_res"] = (scores, docs) + return data From 6d2de979d6d926cb5757aa40dd8cc39415f1a8d5 Mon Sep 17 00:00:00 2001 From: gaotingquan <gaotingquan@baidu.com> Date: Mon, 14 Mar 2022 13:30:57 +0000 Subject: [PATCH 2/3] fix --- .../ppshitu_v2/configs/test_cls_config.yaml | 8 +-- .../ppshitu_v2/configs/test_rec_config.yaml | 8 +-- deploy/python/ppshitu_v2/examples/predict.py | 20 ++++--- .../ppshitu_v2/processor/algo_mod/__init__.py | 38 +++++------- .../algo_mod/postprocessor/__init__.py | 9 +-- .../algo_mod/postprocessor/classification.py | 58 +++++++++---------- .../processor/algo_mod/postprocessor/det.py | 47 ++++++++------- .../processor/algo_mod/predictor/__init__.py | 9 +-- .../algo_mod/predictor/paddle_predictor.py | 34 +++++++---- .../algo_mod/preprocessor/__init__.py | 9 +-- .../processor/algo_mod/searcher/__init__.py | 15 ++++- 11 files changed, 139 insertions(+), 116 deletions(-) diff --git a/deploy/python/ppshitu_v2/configs/test_cls_config.yaml 
b/deploy/python/ppshitu_v2/configs/test_cls_config.yaml index 2b139a849..7b18e9a08 100644 --- a/deploy/python/ppshitu_v2/configs/test_cls_config.yaml +++ b/deploy/python/ppshitu_v2/configs/test_cls_config.yaml @@ -27,10 +27,10 @@ Modules: - name: PaddlePredictor type: predictor inference_model_dir: "./MobileNetV2_infer" - input_names: - inputs: image - output_names: - save_infer_model/scale_0.tmp_1: logits + to_model_names: + image: inputs + from_model_names: + logits: 0 - name: TopK type: postprocessor k: 10 diff --git a/deploy/python/ppshitu_v2/configs/test_rec_config.yaml b/deploy/python/ppshitu_v2/configs/test_rec_config.yaml index 7ce8e4681..1c986bd80 100644 --- a/deploy/python/ppshitu_v2/configs/test_rec_config.yaml +++ b/deploy/python/ppshitu_v2/configs/test_rec_config.yaml @@ -26,9 +26,9 @@ Modules: - name: PaddlePredictor type: predictor inference_model_dir: models/product_ResNet50_vd_aliproduct_v1.0_infer - input_names: - x: image - output_names: - save_infer_model/scale_0.tmp_1: features + to_model_names: + image: x + from_model_names: + features: 0 - name: FeatureNormalizer type: postprocessor \ No newline at end of file diff --git a/deploy/python/ppshitu_v2/examples/predict.py b/deploy/python/ppshitu_v2/examples/predict.py index 3790ec988..19d02406d 100644 --- a/deploy/python/ppshitu_v2/examples/predict.py +++ b/deploy/python/ppshitu_v2/examples/predict.py @@ -20,14 +20,20 @@ def main(): input_data = {"input_image": img} data = engine.process(input_data) - # for det, cls - # print(data) - + # for cls + if "classification_res" in data: + print(data["classification_res"]) + # for det + elif "detection_res" in data: + print(data["detection_res"]) # for rec - # features = data["pred"]["features"] - # print(features) - # print(features.shape) - # print(type(features)) + elif "features" in data["pred"]: + features = data["pred"]["features"] + print(features) + print(features.shape) + print(type(features)) + else: + print("ERROR") if __name__ == '__main__': diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/__init__.py b/deploy/python/ppshitu_v2/processor/algo_mod/__init__.py index d5b09fe6f..57f82899c 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/__init__.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/__init__.py @@ -1,13 +1,7 @@ -# from .postprocessor import build_postprocessor -# from .preprocessor import build_preprocessor -# from .predictor import build_predictor - -import importlib - -from processor.algo_mod import preprocessor -from processor.algo_mod import predictor -from processor.algo_mod import postprocessor -from processor.algo_mod import searcher +from .postprocessor import build_postprocessor +from .preprocessor import build_preprocessor +from .predictor import build_predictor +from .searcher import build_searcher from ..base_processor import BaseProcessor @@ -17,20 +11,18 @@ class AlgoMod(BaseProcessor): self.processors = [] for processor_config in config["processors"]: processor_type = processor_config.get("type") - processor_name = processor_config.get("name") - _mod = importlib.import_module(__name__) - processor = getattr( - getattr(_mod, processor_type), - processor_name)(processor_config) - # if processor_type == "preprocessor": - # processor = build_preprocessor(processor_config) - # elif processor_type == "predictor": - # processor = build_predictor(processor_config) - # elif processor_type == "postprocessor": - # processor = build_postprocessor(processor_config) - # else: - # raise NotImplemented("processor type {} 
unknown.".format(processor_type)) + if processor_type == "preprocessor": + processor = build_preprocessor(processor_config) + elif processor_type == "predictor": + processor = build_predictor(processor_config) + elif processor_type == "postprocessor": + processor = build_postprocessor(processor_config) + elif processor_type == "searcher": + processor = build_searcher(processor_config) + else: + raise NotImplemented("processor type {} unknown.".format( + processor_type)) self.processors.append(processor) def process(self, input_data): diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/__init__.py b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/__init__.py index 6567e6d0c..9edb322b5 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/__init__.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/__init__.py @@ -4,7 +4,8 @@ from .classification import TopK from .det import DetPostPro from .rec import FeatureNormalizer -# def build_postprocessor(config): -# processor_mod = importlib.import_module(__name__) -# processor_name = config.get("name") -# return getattr(processor_mod, processor_name)(config) + +def build_postprocessor(config): + processor_mod = importlib.import_module(__name__) + processor_name = config.get("name") + return getattr(processor_mod, processor_name)(config) diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py index 6ca945f70..4c15b957f 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py @@ -2,6 +2,7 @@ import os import numpy as np +from utils import logger from ...base_processor import BaseProcessor @@ -20,8 +21,8 @@ class TopK(BaseProcessor): return None if not os.path.exists(class_id_map_file): - print( - "Warning: If want to use your own label_dict, please input legal path!\nOtherwise label_names will be empty!" + logger.warning( + "[Classification] If want to use your own label_dict, please input legal path!\nOtherwise label_names will be empty!" ) return None @@ -33,36 +34,31 @@ class TopK(BaseProcessor): partition = line.split("\n")[0].partition(" ") class_id_map[int(partition[0])] = str(partition[-1]) except Exception as ex: - print(ex) + logger.warning(f"[Classification] {ex}") class_id_map = None return class_id_map def process(self, data): - x = data["pred"]["logits"] - # TODO(gaotingquan): support file_name - # if file_names is not None: - # assert x.shape[0] == len(file_names) - y = [] - for idx, probs in enumerate(x): - index = probs.argsort(axis=0)[-self.topk:][::-1].astype( - "int32") if not self.multilabel else np.where( - probs >= 0.5)[0].astype("int32") - clas_id_list = [] - score_list = [] - label_name_list = [] - for i in index: - clas_id_list.append(i.item()) - score_list.append(probs[i].item()) - if self.class_id_map is not None: - label_name_list.append(self.class_id_map[i.item()]) - result = { - "class_ids": clas_id_list, - "scores": np.around( - score_list, decimals=5).tolist(), - } - # if file_names is not None: - # result["file_name"] = file_names[idx] - if label_name_list is not None: - result["label_names"] = label_name_list - y.append(result) - return y + # TODO(gaotingquan): only support bs==1 when 'connector' is not implemented. 
+ probs = data["pred"]["logits"][0] + index = probs.argsort(axis=0)[-self.topk:][::-1].astype( + "int32") if not self.multilabel else np.where( + probs >= 0.5)[0].astype("int32") + clas_id_list = [] + score_list = [] + label_name_list = [] + for i in index: + clas_id_list.append(i.item()) + score_list.append(probs[i].item()) + if self.class_id_map is not None: + label_name_list.append(self.class_id_map[i.item()]) + result = { + "class_ids": clas_id_list, + "scores": np.around( + score_list, decimals=5).tolist(), + } + if label_name_list is not None: + result["label_names"] = label_name_list + + data["classification_res"] = result + return data diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py index c69a9cf17..587430637 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py @@ -11,27 +11,34 @@ class DetPostPro(BaseProcessor): self.label_list = config["label_list"] self.max_det_results = config["max_det_results"] - def process(self, input_data): - pred = input_data["pred"] + def process(self, data): + pred = data["pred"] np_boxes = pred[list(pred.keys())[0]] - if reduce(lambda x, y: x * y, np_boxes.shape) < 6: - logger.warning('[Detector] No object detected.') - np_boxes = np.array([]) - - keep_indexes = np_boxes[:, 1].argsort()[::-1][:self.max_det_results] - results = [] - for idx in keep_indexes: - single_res = np_boxes[idx] + if reduce(lambda x, y: x * y, np_boxes.shape) >= 6: + keep_indexes = np_boxes[:, 1].argsort()[::-1][: + self.max_det_results] + # TODO(gaotingquan): only support bs==1 + single_res = np_boxes[0] class_id = int(single_res[0]) score = single_res[1] bbox = single_res[2:] - if score < self.threshold: - continue - label_name = self.label_list[class_id] - results.append({ - "class_id": class_id, - "score": score, - "bbox": bbox, - "label_name": label_name, - }) - return results + if score > self.threshold: + label_name = self.label_list[class_id] + results = { + "class_id": class_id, + "score": score, + "bbox": bbox, + "label_name": label_name, + } + data["detection_res"] = results + return data + + logger.warning('[Detector] No object detected.') + results = { + "class_id": None, + "score": None, + "bbox": None, + "label_name": None, + } + data["detection_res"] = results + return data diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/predictor/__init__.py b/deploy/python/ppshitu_v2/processor/algo_mod/predictor/__init__.py index 4e27b1176..2913771b3 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/predictor/__init__.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/predictor/__init__.py @@ -3,7 +3,8 @@ import importlib from processor.algo_mod.predictor.paddle_predictor import PaddlePredictor from processor.algo_mod.predictor.onnx_predictor import ONNXPredictor -# def build_predictor(config): -# processor_mod = importlib.import_module(__name__) -# processor_name = config.get("name") -# return getattr(processor_mod, processor_name)(config) + +def build_predictor(config): + processor_mod = importlib.import_module(__name__) + processor_name = config.get("name") + return getattr(processor_mod, processor_name)(config) diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py b/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py index d8c61e930..0a10a4434 100644 --- 
a/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py @@ -48,30 +48,40 @@ class PaddlePredictor(BaseProcessor): paddle_config.switch_use_feed_fetch_ops(False) self.predictor = create_predictor(paddle_config) - if "input_names" in config and config["input_names"]: - self.input_name_mapping = config["input_names"] + if "to_model_names" in config and config["to_model_names"]: + self.input_name_map = { + v: k + for k, v in config["to_model_names"].items() + } else: - self.input_name_mapping = [] - if "output_names" in config and config["output_names"]: - self.output_name_mapping = config["output_names"] + self.input_name_map = {} + if "from_model_names" in config and config["from_model_names"]: + self.output_name_map = config["from_model_names"] else: - self.output_name_mapping = [] + self.output_name_map = {} def process(self, data): input_names = self.predictor.get_input_names() for input_name in input_names: input_tensor = self.predictor.get_input_handle(input_name) - name = self.input_name_mapping[ - input_name] if input_name in self.input_name_mapping else input_name + name = self.input_name_map[ + input_name] if input_name in self.input_name_map else input_name input_tensor.copy_from_cpu(data[name]) self.predictor.run() - output_data = {} + model_output = [] output_names = self.predictor.get_output_names() for output_name in output_names: output = self.predictor.get_output_handle(output_name) - name = self.output_name_mapping[ - output_name] if output_name in self.output_name_mapping else output_name - output_data[name] = output.copy_to_cpu() + model_output.append((output_name, output.copy_to_cpu())) + + if self.output_name_map: + output_data = {} + for name in self.output_name_map: + idx = self.output_name_map[name] + output_data[name] = model_output[idx][1] + else: + output_data = dict(model_output) + data["pred"] = output_data return data diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/preprocessor/__init__.py b/deploy/python/ppshitu_v2/processor/algo_mod/preprocessor/__init__.py index 94b889f43..ffc9efde8 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/preprocessor/__init__.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/preprocessor/__init__.py @@ -2,7 +2,8 @@ import importlib from processor.algo_mod.preprocessor.image_processor import ImageProcessor -# def build_preprocessor(config): -# processor_mod = importlib.import_module(__name__) -# processor_name = config.get("name") -# return getattr(processor_mod, processor_name)(config) + +def build_preprocessor(config): + processor_mod = importlib.import_module(__name__) + processor_name = config.get("name") + return getattr(processor_mod, processor_name)(config) diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/searcher/__init__.py b/deploy/python/ppshitu_v2/processor/algo_mod/searcher/__init__.py index f986738cf..6bc378c8f 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/searcher/__init__.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/searcher/__init__.py @@ -4,11 +4,15 @@ import pickle import faiss +def build_searcher(config): + return Searcher(config) + + class Searcher: def __init__(self, config): super().__init__() - self.Searcher = faiss.read_index( + self.faiss_searcher = faiss.read_index( os.path.join(config["index_dir"], "vector.index")) with open(os.path.join(config["index_dir"], "id_map.pkl"), "rb") as fd: @@ -18,6 +22,11 @@ class Searcher: def process(self, data): features = 
data["features"] - scores, docs = self.Searcher.search(features, self.return_k) - data["search_res"] = (scores, docs) + scores, docs = self.faiss_searcher.search(features, self.return_k) + + preds = {} + preds["rec_docs"] = self.id_map[docs[0][0]].split()[1] + preds["rec_scores"] = scores[0][0] + + data["search_res"] = preds return data From 4386b8418dea02ff644a40911282b3101cb4b2c4 Mon Sep 17 00:00:00 2001 From: gaotingquan <gaotingquan@baidu.com> Date: Tue, 15 Mar 2022 07:41:42 +0000 Subject: [PATCH 3/3] fix: support bs>1 --- .../ppshitu_v2/configs/test_cls_config.yaml | 4 +- .../ppshitu_v2/configs/test_det_config.yaml | 2 + .../ppshitu_v2/configs/test_rec_config.yaml | 2 +- .../algo_mod/postprocessor/classification.py | 44 ++++++++++--------- .../processor/algo_mod/postprocessor/det.py | 36 +++++++-------- .../algo_mod/predictor/paddle_predictor.py | 19 +++----- 6 files changed, 51 insertions(+), 56 deletions(-) diff --git a/deploy/python/ppshitu_v2/configs/test_cls_config.yaml b/deploy/python/ppshitu_v2/configs/test_cls_config.yaml index 7b18e9a08..9a9a167b8 100644 --- a/deploy/python/ppshitu_v2/configs/test_cls_config.yaml +++ b/deploy/python/ppshitu_v2/configs/test_cls_config.yaml @@ -29,10 +29,10 @@ Modules: inference_model_dir: "./MobileNetV2_infer" to_model_names: image: inputs - from_model_names: + from_model_indexes: logits: 0 - name: TopK type: postprocessor k: 10 - class_id_map_file: "../ppcls/utils/imagenet1k_label_list.txt" + class_id_map_file: "../../../ppcls/utils/imagenet1k_label_list.txt" save_dir: None \ No newline at end of file diff --git a/deploy/python/ppshitu_v2/configs/test_det_config.yaml b/deploy/python/ppshitu_v2/configs/test_det_config.yaml index 64a421fae..7a46dd41e 100644 --- a/deploy/python/ppshitu_v2/configs/test_det_config.yaml +++ b/deploy/python/ppshitu_v2/configs/test_det_config.yaml @@ -25,6 +25,8 @@ Modules: - name: PaddlePredictor type: predictor inference_model_dir: ./models/ppyolov2_r50vd_dcn_mainbody_v1.0_infer/ + from_model_indexes: + boxes: 0 - name: DetPostPro type: postprocessor threshold: 0.2 diff --git a/deploy/python/ppshitu_v2/configs/test_rec_config.yaml b/deploy/python/ppshitu_v2/configs/test_rec_config.yaml index 1c986bd80..ba826faec 100644 --- a/deploy/python/ppshitu_v2/configs/test_rec_config.yaml +++ b/deploy/python/ppshitu_v2/configs/test_rec_config.yaml @@ -28,7 +28,7 @@ Modules: inference_model_dir: models/product_ResNet50_vd_aliproduct_v1.0_infer to_model_names: image: x - from_model_names: + from_model_indexes: features: 0 - name: FeatureNormalizer type: postprocessor \ No newline at end of file diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py index 4c15b957f..333657512 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py @@ -39,26 +39,28 @@ class TopK(BaseProcessor): return class_id_map def process(self, data): - # TODO(gaotingquan): only support bs==1 when 'connector' is not implemented. 
- probs = data["pred"]["logits"][0] - index = probs.argsort(axis=0)[-self.topk:][::-1].astype( - "int32") if not self.multilabel else np.where( - probs >= 0.5)[0].astype("int32") - clas_id_list = [] - score_list = [] - label_name_list = [] - for i in index: - clas_id_list.append(i.item()) - score_list.append(probs[i].item()) - if self.class_id_map is not None: - label_name_list.append(self.class_id_map[i.item()]) - result = { - "class_ids": clas_id_list, - "scores": np.around( - score_list, decimals=5).tolist(), - } - if label_name_list is not None: - result["label_names"] = label_name_list + logits = data["pred"]["logits"] + all_results = [] + for probs in logits: + index = probs.argsort(axis=0)[-self.topk:][::-1].astype( + "int32") if not self.multilabel else np.where( + probs >= 0.5)[0].astype("int32") + clas_id_list = [] + score_list = [] + label_name_list = [] + for i in index: + clas_id_list.append(i.item()) + score_list.append(probs[i].item()) + if self.class_id_map is not None: + label_name_list.append(self.class_id_map[i.item()]) + result = { + "class_ids": clas_id_list, + "scores": np.around( + score_list, decimals=5).tolist(), + } + if label_name_list is not None: + result["label_names"] = label_name_list + all_results.append(result) - data["classification_res"] = result + data["classification_res"] = all_results return data diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py index 587430637..5e7792b78 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py @@ -12,33 +12,29 @@ class DetPostPro(BaseProcessor): self.max_det_results = config["max_det_results"] def process(self, data): - pred = data["pred"] - np_boxes = pred[list(pred.keys())[0]] + np_boxes = data["pred"]["boxes"] if reduce(lambda x, y: x * y, np_boxes.shape) >= 6: keep_indexes = np_boxes[:, 1].argsort()[::-1][: self.max_det_results] - # TODO(gaotingquan): only support bs==1 - single_res = np_boxes[0] - class_id = int(single_res[0]) - score = single_res[1] - bbox = single_res[2:] - if score > self.threshold: + + all_results = [] + for idx in keep_indexes: + single_res = np_boxes[idx] + class_id = int(single_res[0]) + score = single_res[1] + bbox = single_res[2:] + if score < self.threshold: + continue label_name = self.label_list[class_id] - results = { + all_results.append({ "class_id": class_id, "score": score, "bbox": bbox, - "label_name": label_name, - } - data["detection_res"] = results - return data + "label_name": label_name + }) + data["detection_res"] = all_results + return data logger.warning('[Detector] No object detected.') - results = { - "class_id": None, - "score": None, - "bbox": None, - "label_name": None, - } - data["detection_res"] = results + data["detection_res"] = [] return data diff --git a/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py b/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py index 0a10a4434..ea303f630 100644 --- a/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py +++ b/deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py @@ -55,10 +55,8 @@ class PaddlePredictor(BaseProcessor): } else: self.input_name_map = {} - if "from_model_names" in config and config["from_model_names"]: - self.output_name_map = config["from_model_names"] - else: - self.output_name_map = {} + + self.output_name_map = 
config["from_model_indexes"] def process(self, data): input_names = self.predictor.get_input_names() @@ -73,15 +71,12 @@ class PaddlePredictor(BaseProcessor): output_names = self.predictor.get_output_names() for output_name in output_names: output = self.predictor.get_output_handle(output_name) - model_output.append((output_name, output.copy_to_cpu())) + model_output.append(output.copy_to_cpu()) - if self.output_name_map: - output_data = {} - for name in self.output_name_map: - idx = self.output_name_map[name] - output_data[name] = model_output[idx][1] - else: - output_data = dict(model_output) + output_data = {} + for name in self.output_name_map: + idx = self.output_name_map[name] + output_data[name] = model_output[idx] data["pred"] = output_data return data