# Resolve the Python interpreter to invoke for subprocesses.
# sys.executable can be an empty string (e.g. embedded interpreters),
# so fall back to the bare "python" command in that case.
python_exec = sys.executable or "python"

# Pick the best available inference device, preferring NVIDIA CUDA,
# then Apple-silicon MPS, and finally plain CPU.
if torch.cuda.is_available():
    infer_device = "cuda"
elif torch.backends.mps.is_available():
    infer_device = "mps"
else:
    infer_device = "cpu"