fix(device): correct attribute name from total_mem to total_memory
torch.cuda.get_device_properties() exposes total_memory, not total_mem.
This caused an AttributeError when calling detect_gpu_info() or auto_configure().
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
- llm_lab/utils/device.py +2 -2
llm_lab/utils/device.py
CHANGED
|
@@ -24,7 +24,7 @@ def detect_gpu_info() -> dict:
|
|
| 24 |
return {}
|
| 25 |
return {
|
| 26 |
"name": torch.cuda.get_device_name(),
|
| 27 |
-
"memory_gb": round(torch.cuda.get_device_properties(0).total_mem / 1e9, 1),
|
| 28 |
}
|
| 29 |
|
| 30 |
|
|
@@ -45,7 +45,7 @@ def auto_configure(config: "TrainConfig") -> "TrainConfig":
|
|
| 45 |
return config
|
| 46 |
|
| 47 |
gpu_name = torch.cuda.get_device_name().lower()
|
| 48 |
-
gpu_mem = torch.cuda.get_device_properties(0).total_mem / 1e9
|
| 49 |
|
| 50 |
print(f"\n🔍 GPU detected: {torch.cuda.get_device_name()} ({gpu_mem:.1f} GB)")
|
| 51 |
|
|
|
|
| 24 |
return {}
|
| 25 |
return {
|
| 26 |
"name": torch.cuda.get_device_name(),
|
| 27 |
+
"memory_gb": round(torch.cuda.get_device_properties(0).total_memory / 1e9, 1),
|
| 28 |
}
|
| 29 |
|
| 30 |
|
|
|
|
| 45 |
return config
|
| 46 |
|
| 47 |
gpu_name = torch.cuda.get_device_name().lower()
|
| 48 |
+
gpu_mem = torch.cuda.get_device_properties(0).total_memory / 1e9
|
| 49 |
|
| 50 |
print(f"\n🔍 GPU detected: {torch.cuda.get_device_name()} ({gpu_mem:.1f} GB)")
|
| 51 |
|