yashvshetty committed on
Commit
adbdf7d
·
1 Parent(s): 7b3c958

Fix: total_mem -> total_memory in training script

Browse files
Files changed (1) hide show
  1. scripts/train_lora.py +1 -1
scripts/train_lora.py CHANGED
@@ -15,7 +15,7 @@ print("CLARKE LoRA TRAINING - Starting")
15
  print("=" * 60)
16
 
17
  print(f"GPU: {torch.cuda.get_device_name(0)}")
18
- print(f"GPU Memory: {torch.cuda.get_device_properties(0).total_mem / 1e9:.1f} GB")
19
 
20
  MODEL_ID = "google/medgemma-27b-text-it"
21
  ADAPTER_REPO = "yashvshetty/clarke-medgemma-27b-lora"
 
15
  print("=" * 60)
16
 
17
  print(f"GPU: {torch.cuda.get_device_name(0)}")
18
+ print(f"GPU Memory: {torch.cuda.get_device_properties(0).total_memory / 1e9:.1f} GB")
19
 
20
  MODEL_ID = "google/medgemma-27b-text-it"
21
  ADAPTER_REPO = "yashvshetty/clarke-medgemma-27b-lora"