Update config.py

Change the inference device on Mac from MPS to CPU to speed up inference and reduce memory leaks
main
XXXXRT666 1 year ago committed by GitHub
parent a16de2e7c6
commit 3180294710
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

@ -20,7 +20,7 @@ python_exec = sys.executable or "python"
 if torch.cuda.is_available():
     infer_device = "cuda"
 elif torch.backends.mps.is_available():
-    infer_device = "mps"
+    infer_device = "cpu"
 else:
     infer_device = "cpu"

Loading…
Cancel
Save