From 0f63d567367b8830aaa9d92a0eee1f00a40575fb Mon Sep 17 00:00:00 2001
From: rancai
Date: Thu, 20 Jun 2024 14:19:11 +0800
Subject: [PATCH] Add the {"cudnn_conv_algo_search": "DEFAULT"} option to speed
 up onnxruntime GPU inference; specify "CPUExecutionProvider" explicitly to fix
 the onnxruntime error in CPU-only environments
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 tools/infer/utility.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tools/infer/utility.py b/tools/infer/utility.py
index 4a734683de..a71f198bb0 100644
--- a/tools/infer/utility.py
+++ b/tools/infer/utility.py
@@ -197,10 +197,10 @@ def create_predictor(args, mode, logger):
             raise ValueError("not find model file path {}".format(model_file_path))
         if args.use_gpu:
             sess = ort.InferenceSession(
-                model_file_path, providers=["CUDAExecutionProvider"]
+                model_file_path, providers=[("CUDAExecutionProvider", {"cudnn_conv_algo_search": "DEFAULT"})]
             )
         else:
-            sess = ort.InferenceSession(model_file_path)
+            sess = ort.InferenceSession(model_file_path, providers=["CPUExecutionProvider"])
         return sess, sess.get_inputs()[0], None, None
     else:
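
For context, the snippet below is a minimal, self-contained sketch of the same session setup the patch applies: it passes the cudnn_conv_algo_search="DEFAULT" option to CUDAExecutionProvider so cuDNN skips its exhaustive convolution-algorithm search on the first GPU run, and otherwise names CPUExecutionProvider explicitly to avoid the provider-selection error the subject refers to. The model path and the get_available_providers() check are assumptions added for illustration and are not part of the patch.

    import onnxruntime as ort

    # Hypothetical model path, used only for this illustration.
    model_file_path = "inference/det_onnx/model.onnx"

    if "CUDAExecutionProvider" in ort.get_available_providers():
        # "DEFAULT" makes cuDNN skip its exhaustive convolution-algorithm
        # search, removing the long warm-up on the first GPU inference.
        sess = ort.InferenceSession(
            model_file_path,
            providers=[("CUDAExecutionProvider", {"cudnn_conv_algo_search": "DEFAULT"})],
        )
    else:
        # Naming CPUExecutionProvider explicitly avoids the error that
        # onnxruntime-gpu builds raise on CPU-only machines when no
        # providers argument is given.
        sess = ort.InferenceSession(model_file_path, providers=["CPUExecutionProvider"])

    print(sess.get_inputs()[0].name)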