Merge pull request #172 from marty1885/fix-long-inference-gpu

Fix extremely long inference time when using CUDA with short sentences.
This commit is contained in:
Michael Hansen
2023-11-22 20:40:35 -06:00
committed by GitHub

View File

@@ -41,7 +41,7 @@ class PiperVoice:
sess_options=onnxruntime.SessionOptions(),
providers=["CPUExecutionProvider"]
if not use_cuda
-                else ["CUDAExecutionProvider"],
+                else [("CUDAExecutionProvider", {"cudnn_conv_algo_search": "HEURISTIC"})],
),
)