[third-party] Fix the issue of inference errors with KIE models in ONNX format (#14138)

* fix KIE model inference when using an ONNX model

* fix code style

* fix ONNX inputs compatibility with det and rec

* fix code style
Alex 2024-11-02 00:14:34 +08:00 committed by GitHub
parent d3d7e85883
commit 58e876d38d
2 changed files with 20 additions and 6 deletions
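
Background for the changes below: `SerPredictor.__call__` previously always drove the Paddle inference API (`copy_from_cpu` / `run()` / `copy_to_cpu`), which breaks when `--use_onnx` makes `create_predictor` return an `onnxruntime.InferenceSession`. The following is a minimal sketch of the branching the patch introduces, reduced to a standalone helper; `run_kie_forward` and its argument names are illustrative, not code from the repository.

# Illustrative sketch, not part of the patch: driving either back end.
def run_kie_forward(predictor, input_tensor, output_tensors, data, use_onnx):
    # data: list of numpy arrays ordered to match the model inputs.
    # input_tensor: list of ONNX input names, or Paddle input handles.
    # output_tensors: Paddle output handles (unused on the ONNX path).
    if use_onnx:
        # onnxruntime takes a {input_name: array} feed dict and already
        # returns plain numpy arrays, so no copy_to_cpu() is needed.
        feed = {name: data[idx] for idx, name in enumerate(input_tensor)}
        return predictor.run(None, feed)
    # Paddle inference: copy inputs to the predictor, run, then copy each
    # output back to host memory.
    for idx in range(len(input_tensor)):
        input_tensor[idx].copy_from_cpu(data[idx])
    predictor.run()
    return [t.copy_to_cpu() for t in output_tensors]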

@@ -40,6 +40,7 @@ logger = get_logger()
 class SerPredictor(object):
     def __init__(self, args):
+        self.args = args
         self.ocr_engine = PaddleOCR(
             use_angle_cls=args.use_angle_cls,
             det_model_dir=args.det_model_dir,
@@ -113,15 +114,22 @@ class SerPredictor(object):
                 data[idx] = np.expand_dims(data[idx], axis=0)
             else:
                 data[idx] = [data[idx]]
-        for idx in range(len(self.input_tensor)):
-            self.input_tensor[idx].copy_from_cpu(data[idx])
-        self.predictor.run()
+        if self.args.use_onnx:
+            input_tensor = {
+                name: data[idx] for idx, name in enumerate(self.input_tensor)
+            }
+            self.output_tensors = self.predictor.run(None, input_tensor)
+        else:
+            for idx in range(len(self.input_tensor)):
+                self.input_tensor[idx].copy_from_cpu(data[idx])
+            self.predictor.run()
         outputs = []
         for output_tensor in self.output_tensors:
-            output = output_tensor.copy_to_cpu()
+            output = (
+                output_tensor if self.args.use_onnx else output_tensor.copy_to_cpu()
+            )
             outputs.append(output)
         preds = outputs[0]

@@ -221,7 +221,13 @@ def create_predictor(args, mode, logger):
                 providers=["CPUExecutionProvider"],
                 sess_options=sess_options,
             )
-        return sess, sess.get_inputs()[0], None, None
+        inputs = sess.get_inputs()
+        return (
+            sess,
+            inputs[0] if len(inputs) == 1 else [vo.name for vo in inputs],
+            None,
+            None,
+        )
     else:
         file_names = ["model", "inference"]
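
For context on the return-value change above: `onnxruntime.InferenceSession.get_inputs()` returns a list of NodeArg objects, each carrying the input's `.name` and `.shape`. Det/rec models export a single image input, while SER models export several, which is why a bare `get_inputs()[0]` was no longer enough. A small sketch, where "ser_model.onnx" is a hypothetical exported model path:

# Small sketch; "ser_model.onnx" is a hypothetical exported SER model path.
import onnxruntime as ort

sess = ort.InferenceSession("ser_model.onnx", providers=["CPUExecutionProvider"])
inputs = sess.get_inputs()                  # list of NodeArg objects
print([(i.name, i.shape) for i in inputs])  # several entries for SER, one for det/rec

# Same dispatch as the patched return: keep the single NodeArg for det/rec,
# otherwise pass the list of input names on to the KIE predictor, which pairs
# them with its data by position.
input_tensor = inputs[0] if len(inputs) == 1 else [vo.name for vo in inputs]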