Nekochu committed on
Commit
dece91f
·
1 Parent(s): a6b0e73

fix: LoRA rank param is 'r' not 'rank'

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -313,7 +313,7 @@ def train_lora(
313
  )
314
 
315
  adapter_cfg = LoRAConfigV2(
316
- rank=rank,
317
  alpha=rank,
318
  dropout=0.0,
319
  )
 
313
  )
314
 
315
  adapter_cfg = LoRAConfigV2(
316
+ r=rank,
317
  alpha=rank,
318
  dropout=0.0,
319
  )