Spaces:
Paused
Paused
Zhen Ye
committed on
Commit
·
e43c504
1
Parent(s):
38a60a8
Uncomment CUDA_VISIBLE_DEVICES clearing to enable all GPUs
Browse files
- inference.py +3 -3
inference.py
CHANGED
|
@@ -621,9 +621,9 @@ def run_inference(
|
|
| 621 |
|
| 622 |
# Clear CUDA_VISIBLE_DEVICES to ensure we see all GPUs if not already handled
|
| 623 |
# This must be done BEFORE any torch.cuda calls in this scope if the env was modified externally
|
| 624 |
-
|
| 625 |
-
|
| 626 |
-
|
| 627 |
|
| 628 |
num_gpus = torch.cuda.device_count()
|
| 629 |
logging.info(f"[DEBUG] num_gpus after clear: {num_gpus}")
|
|
|
|
| 621 |
|
| 622 |
# Clear CUDA_VISIBLE_DEVICES to ensure we see all GPUs if not already handled
|
| 623 |
# This must be done BEFORE any torch.cuda calls in this scope if the env was modified externally
|
| 624 |
+
if "CUDA_VISIBLE_DEVICES" in os.environ:
|
| 625 |
+
logging.info("[DEBUG] Deleting CUDA_VISIBLE_DEVICES from env")
|
| 626 |
+
del os.environ["CUDA_VISIBLE_DEVICES"]
|
| 627 |
|
| 628 |
num_gpus = torch.cuda.device_count()
|
| 629 |
logging.info(f"[DEBUG] num_gpus after clear: {num_gpus}")
|