Shreyas Meher committed on
Commit
d386d50
·
1 Parent(s): 2f5b1b5

Fix: total_mem -> total_memory for CUDA device properties

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -68,7 +68,7 @@ def get_system_info():
68
  # Device
69
  if device.type == 'cuda':
70
  gpu_name = torch.cuda.get_device_name(0)
71
- vram = torch.cuda.get_device_properties(0).total_mem / (1024 ** 3)
72
  lines.append(f"GPU: {gpu_name} ({vram:.1f} GB VRAM)")
73
  lines.append("FP16 training: supported")
74
  elif device.type == 'mps':
 
68
  # Device
69
  if device.type == 'cuda':
70
  gpu_name = torch.cuda.get_device_name(0)
71
+ vram = torch.cuda.get_device_properties(0).total_memory / (1024 ** 3)
72
  lines.append(f"GPU: {gpu_name} ({vram:.1f} GB VRAM)")
73
  lines.append("FP16 training: supported")
74
  elif device.type == 'mps':