Nekochu committed on
Commit
a6b0e73
·
1 Parent(s): 2e461fa

fix training: use LoRAConfigV2 instead of Union type alias

Browse files
Files changed (1) hide show
  1. app.py +2 -4
app.py CHANGED
@@ -302,8 +302,7 @@ def train_lora(
302
 
303
  from acestep.training_v2.model_loader import load_decoder_for_training
304
  from acestep.training_v2.trainer_fixed import FixedLoRATrainer
305
- from acestep.training_v2.fixed_lora_module import AdapterConfig
306
- from acestep.training_v2.configs import TrainingConfigV2
307
 
308
  # Load model for training
309
  model = load_decoder_for_training(
@@ -313,11 +312,10 @@ def train_lora(
313
  precision="float32",
314
  )
315
 
316
- adapter_cfg = AdapterConfig(
317
  rank=rank,
318
  alpha=rank,
319
  dropout=0.0,
320
- adapter_type="lora",
321
  )
322
 
323
  train_cfg = TrainingConfigV2(
 
302
 
303
  from acestep.training_v2.model_loader import load_decoder_for_training
304
  from acestep.training_v2.trainer_fixed import FixedLoRATrainer
305
+ from acestep.training_v2.configs import TrainingConfigV2, LoRAConfigV2
 
306
 
307
  # Load model for training
308
  model = load_decoder_for_training(
 
312
  precision="float32",
313
  )
314
 
315
+ adapter_cfg = LoRAConfigV2(
316
  rank=rank,
317
  alpha=rank,
318
  dropout=0.0,
 
319
  )
320
 
321
  train_cfg = TrainingConfigV2(