commit d072db5545

@@ -0,0 +1,38 @@
Global:
  Engine: POPEngine
  infer_imgs: "../../images/wangzai.jpg"


Modules:
  - name:
    type: AlgoMod
    processors:
      - name: ImageProcessor
        type: preprocessor
        ops:
          - ResizeImage:
              resize_short: 256
          - CropImage:
              size: 224
          - NormalizeImage:
              scale: 0.00392157
              mean: [0.485, 0.456, 0.406]
              std: [0.229, 0.224, 0.225]
              order: hwc
          - ToCHWImage:
          - GetShapeInfo:
              configs:
                order: chw
          - ToBatch:
      - name: PaddlePredictor
        type: predictor
        inference_model_dir: "./MobileNetV2_infer"
        to_model_names:
          image: inputs
        from_model_indexes:
          logits: 0
      - name: TopK
        type: postprocessor
        k: 10
        class_id_map_file: "../../../ppcls/utils/imagenet1k_label_list.txt"
        save_dir: None
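(A note on the constants above: scale: 0.00392157 is 1/255, so uint8 pixels are first mapped to [0, 1] and then normalized with the ImageNet mean/std; the same values recur in the configs below.)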
@@ -0,0 +1,35 @@
Global:
  Engine: POPEngine
  infer_imgs: "../../images/wangzai.jpg"


Modules:
  - name:
    type: AlgoMod
    processors:
      - name: ImageProcessor
        type: preprocessor
        ops:
          - ResizeImage:
              size: [640, 640]
              interpolation: 2
          - NormalizeImage:
              scale: 0.00392157
              mean: [0.485, 0.456, 0.406]
              std: [0.229, 0.224, 0.225]
              order: hwc
          - ToCHWImage:
          - GetShapeInfo:
              configs:
                order: chw
          - ToBatch:
      - name: PaddlePredictor
        type: predictor
        inference_model_dir: ./models/ppyolov2_r50vd_dcn_mainbody_v1.0_infer/
        from_model_indexes:
          boxes: 0
      - name: DetPostPro
        type: postprocessor
        threshold: 0.2
        max_det_results: 1
        label_list:
          - foreground
@@ -0,0 +1,34 @@
Global:
  Engine: POPEngine
  infer_imgs: "../../images/wangzai.jpg"


Modules:
  - name:
    type: AlgoMod
    processors:
      - name: ImageProcessor
        type: preprocessor
        ops:
          - ResizeImage:
              resize_short: 256
          - CropImage:
              size: 224
          - NormalizeImage:
              scale: 0.00392157
              mean: [0.485, 0.456, 0.406]
              std: [0.229, 0.224, 0.225]
              order: hwc
          - ToCHWImage:
          - GetShapeInfo:
              configs:
                order: chw
          - ToBatch:
      - name: PaddlePredictor
        type: predictor
        inference_model_dir: models/product_ResNet50_vd_aliproduct_v1.0_infer
        to_model_names:
          image: x
        from_model_indexes:
          features: 0
      - name: FeatureNormalizer
        type: postprocessor
@@ -0,0 +1,16 @@
Global:
  Engine: POPEngine
  infer_imgs: "./vector.npy"


Modules:
  - name:
    type: AlgoMod
    processors:
      - name: Searcher
        type: searcher
        index_dir: "./index"
        dist_type: "IP"
        embedding_size: 512
        batch_size: 32
        return_k: 5
        score_thres: 0.5
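The Searcher added later in this diff reads two files from index_dir: a faiss index named vector.index and a pickled id map named id_map.pkl (those file names come from the searcher code below; everything else in this sketch is illustrative). A minimal offline build matching embedding_size: 512 and dist_type: "IP" might look like:

    import os
    import pickle

    import faiss
    import numpy as np

    # Illustrative gallery: 100 random 512-dim feature vectors. In practice
    # these would come from the feature-extraction pipeline above.
    embedding_size = 512
    gallery_features = np.random.rand(100, embedding_size).astype("float32")
    # L2-normalize so inner product behaves like cosine similarity.
    gallery_features /= np.linalg.norm(gallery_features, axis=1, keepdims=True)

    # Inner-product index, matching dist_type: "IP" in the config.
    index = faiss.IndexFlatIP(embedding_size)
    index.add(gallery_features)

    os.makedirs("./index", exist_ok=True)
    faiss.write_index(index, os.path.join("./index", "vector.index"))

    # id_map maps a faiss row id to a space-separated string; the searcher
    # below recovers the label via id_map[doc].split()[1].
    id_map = {i: f"gallery/img_{i}.jpg label_{i}" for i in range(100)}
    with open(os.path.join("./index", "id_map.pkl"), "wb") as fd:
        pickle.dump(id_map, fd)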
@@ -18,8 +18,22 @@ def main():
     image_file = "../../images/wangzai.jpg"
     img = cv2.imread(image_file)[:, :, ::-1]
     input_data = {"input_image": img}
-    output = engine.process(input_data)
-    print(output)
+    data = engine.process(input_data)
+
+    # for cls
+    if "classification_res" in data:
+        print(data["classification_res"])
+    # for det
+    elif "detection_res" in data:
+        print(data["detection_res"])
+    # for rec
+    elif "features" in data["pred"]:
+        features = data["pred"]["features"]
+        print(features)
+        print(features.shape)
+        print(type(features))
+    else:
+        print("ERROR")


 if __name__ == '__main__':
@@ -0,0 +1,31 @@
import os
import sys
__dir__ = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.abspath(os.path.join(__dir__, '../')))

import cv2

from engine import build_engine
from utils import config
from utils.get_image_list import get_image_list

import numpy as np


def load_vector(path):
    return np.load(path)


def main():
    args = config.parse_args()
    config_dict = config.get_config(
        args.config, overrides=args.override, show=False)
    config_dict.profiler_options = args.profiler_options
    engine = build_engine(config_dict)
    vector = load_vector(config_dict["Global"]["infer_imgs"])
    output = engine.process({"features": vector})
    print(output["search_res"])


if __name__ == '__main__':
    main()
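A rough usage sketch for the script above, assuming it is saved as e.g. test_search.py beside the searcher config, and that config.parse_args exposes the usual PaddleClas-style flags (-c for the config file, -o for overrides):

    python test_search.py -c searcher_config.yaml

Since Global.infer_imgs points at a .npy file here, the engine is fed precomputed query features and runs only the searcher stage.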
@@ -1,6 +1,7 @@
 from .postprocessor import build_postprocessor
 from .preprocessor import build_preprocessor
 from .predictor import build_predictor
+from .searcher import build_searcher

 from ..base_processor import BaseProcessor

@@ -10,14 +11,18 @@ class AlgoMod(BaseProcessor):
         self.processors = []
         for processor_config in config["processors"]:
             processor_type = processor_config.get("type")
+
             if processor_type == "preprocessor":
                 processor = build_preprocessor(processor_config)
             elif processor_type == "predictor":
                 processor = build_predictor(processor_config)
             elif processor_type == "postprocessor":
                 processor = build_postprocessor(processor_config)
+            elif processor_type == "searcher":
+                processor = build_searcher(processor_config)
             else:
-                raise NotImplemented("processor type {} unknown.".format(processor_type))
+                raise NotImplementedError("processor type {} unknown.".format(
+                    processor_type))
             self.processors.append(processor)

     def process(self, input_data):
@@ -1,6 +1,8 @@
 import importlib

+from .classification import TopK
 from .det import DetPostPro
+from .rec import FeatureNormalizer


 def build_postprocessor(config):
@@ -0,0 +1,66 @@
import os

import numpy as np

from utils import logger
from ...base_processor import BaseProcessor


class TopK(BaseProcessor):
    def __init__(self, config):
        self.topk = config["k"]
        assert isinstance(self.topk, int)

        class_id_map_file = config["class_id_map_file"]
        self.class_id_map = self.parse_class_id_map(class_id_map_file)

        self.multilabel = config.get("multilabel", False)

    def parse_class_id_map(self, class_id_map_file):
        if class_id_map_file is None:
            return None

        if not os.path.exists(class_id_map_file):
            logger.warning(
                "[Classification] If you want to use your own label dict, please provide a valid path!\nOtherwise label_names will be empty!"
            )
            return None

        try:
            class_id_map = {}
            with open(class_id_map_file, "r") as fin:
                lines = fin.readlines()
                for line in lines:
                    partition = line.split("\n")[0].partition(" ")
                    class_id_map[int(partition[0])] = str(partition[-1])
        except Exception as ex:
            logger.warning(f"[Classification] {ex}")
            class_id_map = None
        return class_id_map

    def process(self, data):
        logits = data["pred"]["logits"]
        all_results = []
        for probs in logits:
            # multilabel: keep every class with prob >= 0.5;
            # otherwise take the top-k classes by descending prob.
            index = probs.argsort(axis=0)[-self.topk:][::-1].astype(
                "int32") if not self.multilabel else np.where(
                    probs >= 0.5)[0].astype("int32")
            clas_id_list = []
            score_list = []
            label_name_list = []
            for i in index:
                clas_id_list.append(i.item())
                score_list.append(probs[i].item())
                if self.class_id_map is not None:
                    label_name_list.append(self.class_id_map[i.item()])
            result = {
                "class_ids": clas_id_list,
                "scores": np.around(
                    score_list, decimals=5).tolist(),
            }
            if label_name_list:
                result["label_names"] = label_name_list
            all_results.append(result)

        data["classification_res"] = all_results
        return data
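The index expression in process is dense; a small worked example of the single-label branch (illustrative numbers, k=3):

    import numpy as np

    probs = np.array([0.05, 0.60, 0.10, 0.25])
    k = 3
    # argsort ascending, take the last k, reverse to descending order
    index = probs.argsort(axis=0)[-k:][::-1]
    print(index)         # [1 3 2]
    print(probs[index])  # [0.6  0.25 0.1 ]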
@@ -11,15 +11,13 @@ class DetPostPro(BaseProcessor):
         self.label_list = config["label_list"]
         self.max_det_results = config["max_det_results"]

-    def process(self, input_data):
-        pred = input_data["pred"]
-        np_boxes = pred[list(pred.keys())[0]]
-        if reduce(lambda x, y: x * y, np_boxes.shape) < 6:
-            logger.warning('[Detector] No object detected.')
-            np_boxes = np.array([])
-
-        keep_indexes = np_boxes[:, 1].argsort()[::-1][:self.max_det_results]
-        results = []
-        for idx in keep_indexes:
-            single_res = np_boxes[idx]
-            class_id = int(single_res[0])
+    def process(self, data):
+        np_boxes = data["pred"]["boxes"]
+        if reduce(lambda x, y: x * y, np_boxes.shape) >= 6:
+            keep_indexes = np_boxes[:, 1].argsort()[::-1][:
+                                                          self.max_det_results]
+
+            all_results = []
+            for idx in keep_indexes:
+                single_res = np_boxes[idx]
+                class_id = int(single_res[0])
@@ -28,10 +26,15 @@ class DetPostPro(BaseProcessor):
-            if score < self.threshold:
-                continue
-            label_name = self.label_list[class_id]
-            results.append({
-                "class_id": class_id,
-                "score": score,
-                "bbox": bbox,
-                "label_name": label_name,
-            })
-        return results
+                if score < self.threshold:
+                    continue
+                label_name = self.label_list[class_id]
+                all_results.append({
+                    "class_id": class_id,
+                    "score": score,
+                    "bbox": bbox,
+                    "label_name": label_name
+                })
+            data["detection_res"] = all_results
+            return data
+
+        logger.warning('[Detector] No object detected.')
+        data["detection_res"] = []
+        return data
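For context on the >= 6 guard: the way single_res is unpacked implies each detection row is [class_id, score, x1, y1, x2, y2] (six numbers), so fewer than six elements in total means no complete detection. An illustrative row under that reading (an assumption inferred from the unpacking, not stated in the diff):

    import numpy as np

    # hypothetical single detection row: [class_id, score, x1, y1, x2, y2]
    np_boxes = np.array([[0.0, 0.83, 12.0, 30.0, 210.0, 255.0]])
    single_res = np_boxes[0]
    class_id = int(single_res[0])  # 0 -> "foreground" in the detection config
    score = single_res[1]          # 0.83
    bbox = single_res[2:]          # array([ 12.,  30., 210., 255.])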
@@ -0,0 +1,16 @@
import numpy as np

from ...base_processor import BaseProcessor


class FeatureNormalizer(BaseProcessor):
    def __init__(self, config=None):
        pass

    def process(self, data):
        batch_output = data["pred"]["features"]
        feas_norm = np.sqrt(
            np.sum(np.square(batch_output), axis=1, keepdims=True))
        batch_output = np.divide(batch_output, feas_norm)
        data["pred"]["features"] = batch_output
        return data
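One reason this normalizer matters: with unit-norm features, the inner-product metric the searcher config selects (dist_type: "IP") coincides with cosine similarity. A small check, using the same normalization as process above (illustrative values):

    import numpy as np

    # Two arbitrary feature rows (illustrative values).
    feats = np.array([[3.0, 4.0], [1.0, 0.0]], dtype="float32")

    # Same normalization as FeatureNormalizer.process.
    norm = np.sqrt(np.sum(np.square(feats), axis=1, keepdims=True))
    unit = feats / norm

    # Inner product of unit vectors == cosine similarity of the originals.
    ip = unit[0] @ unit[1]
    cos = feats[0] @ feats[1] / (
        np.linalg.norm(feats[0]) * np.linalg.norm(feats[1]))
    assert np.isclose(ip, cos)  # both are 0.6 here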
@@ -48,17 +48,35 @@ class PaddlePredictor(BaseProcessor):
         paddle_config.switch_use_feed_fetch_ops(False)
         self.predictor = create_predictor(paddle_config)

-    def process(self, input_data):
+        if "to_model_names" in config and config["to_model_names"]:
+            self.input_name_map = {
+                v: k
+                for k, v in config["to_model_names"].items()
+            }
+        else:
+            self.input_name_map = {}
+
+        self.output_name_map = config["from_model_indexes"]
+
+    def process(self, data):
         input_names = self.predictor.get_input_names()
         for input_name in input_names:
             input_tensor = self.predictor.get_input_handle(input_name)
-            input_tensor.copy_from_cpu(input_data[input_name])
+            name = self.input_name_map[
+                input_name] if input_name in self.input_name_map else input_name
+            input_tensor.copy_from_cpu(data[name])
         self.predictor.run()

-        output_data = {}
+        model_output = []
         output_names = self.predictor.get_output_names()
         for output_name in output_names:
             output = self.predictor.get_output_handle(output_name)
-            output_data[output_name] = output.copy_to_cpu()
-        input_data["pred"] = output_data
-        return input_data
+            model_output.append(output.copy_to_cpu())
+
+        output_data = {}
+        for name in self.output_name_map:
+            idx = self.output_name_map[name]
+            output_data[name] = model_output[idx]
+
+        data["pred"] = output_data
+        return data
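To summarize the two config keys this change introduces: to_model_names maps a pipeline data key to a model input name (the predictor inverts the dict so it can look up by tensor name), and from_model_indexes binds a pipeline key to a model output position. A standalone sketch of the same lookup logic (hypothetical names, no Paddle required):

    # Config fragment: pipeline key "image" feeds the model input named "inputs".
    to_model_names = {"image": "inputs"}
    from_model_indexes = {"logits": 0}

    # The predictor inverts to_model_names so it can look up by tensor name.
    input_name_map = {v: k for k, v in to_model_names.items()}
    data = {"image": "<preprocessed batch>"}

    for input_name in ["inputs"]:  # names the model itself would report
        key = input_name_map.get(input_name, input_name)
        feed = data[key]  # -> data["image"]

    model_output = ["<logits array>"]  # output tensors in model order
    data["pred"] = {name: model_output[idx]
                    for name, idx in from_model_indexes.items()}
    print(data["pred"])  # {'logits': '<logits array>'}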
@@ -1,4 +1,32 @@
+import os
+import pickle
+
+import faiss
+
+
 def build_searcher(config):
-    pass
+    return Searcher(config)
+
+
+class Searcher:
+    def __init__(self, config):
+        super().__init__()
+
+        self.faiss_searcher = faiss.read_index(
+            os.path.join(config["index_dir"], "vector.index"))
+
+        with open(os.path.join(config["index_dir"], "id_map.pkl"), "rb") as fd:
+            self.id_map = pickle.load(fd)
+
+        self.return_k = config["return_k"]
+
+    def process(self, data):
+        features = data["features"]
+        scores, docs = self.faiss_searcher.search(features, self.return_k)
+
+        preds = {}
+        preds["rec_docs"] = self.id_map[docs[0][0]].split()[1]
+        preds["rec_scores"] = scores[0][0]
+
+        data["search_res"] = preds
+        return data