solve the onnxruntime inference issue (#13154)
parent f8ca01dc01
commit 6d5f998fe1
@@ -197,10 +197,18 @@ def create_predictor(args, mode, logger):
             raise ValueError("not find model file path {}".format(model_file_path))
         if args.use_gpu:
             sess = ort.InferenceSession(
-                model_file_path, providers=["CUDAExecutionProvider"]
+                model_file_path,
+                providers=[
+                    (
+                        "CUDAExecutionProvider",
+                        {"device_id": args.gpu_id, "cudnn_conv_algo_search": "DEFAULT"},
+                    )
+                ],
             )
         else:
-            sess = ort.InferenceSession(model_file_path)
+            sess = ort.InferenceSession(
+                model_file_path, providers=["CPUExecutionProvider"]
+            )
         return sess, sess.get_inputs()[0], None, None

     else:
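For context, a minimal standalone sketch of the provider-tuple API this commit adopts. The model path and device_id below are placeholders, not values from the commit; probing ort.get_available_providers() to fall back to CPU is an assumption added here for self-containment, mirroring the if/else above.

# Sketch of configuring onnxruntime execution providers, assuming a
# placeholder model file "model.onnx" and GPU device 0.
import onnxruntime as ort

model_file_path = "model.onnx"  # placeholder path

if "CUDAExecutionProvider" in ort.get_available_providers():
    # A (name, options) tuple pins the session to a specific GPU and sets
    # cudnn_conv_algo_search to "DEFAULT" instead of the exhaustive search.
    sess = ort.InferenceSession(
        model_file_path,
        providers=[
            (
                "CUDAExecutionProvider",
                {"device_id": 0, "cudnn_conv_algo_search": "DEFAULT"},
            )
        ],
    )
else:
    # Recent onnxruntime releases expect providers to be passed explicitly,
    # which is why the CPU branch now names CPUExecutionProvider.
    sess = ort.InferenceSession(model_file_path, providers=["CPUExecutionProvider"])

print(sess.get_inputs()[0].name)  # first input tensor, as returned by create_predictor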