Nekochu committed on
Commit
e62602f
·
1 Parent(s): 5fe3c53

fix: adapter saved to clean dir, LM dropdown no 'Default', on-demand download

Browse files
Files changed (2) hide show
  1. app.py +31 -16
  2. train_engine.py +4 -5
app.py CHANGED
@@ -179,14 +179,27 @@ def _run_pipeline(caption, lyrics, bpm, duration, seed, steps, output_format,
179
  # LM model scanning & on-demand download
180
  # ---------------------------------------------------------------------------
181
 
 
 
 
 
 
 
182
  def _scan_lm_models():
183
- """Scan /app/models for *-lm-*.gguf files, return list of filenames."""
184
- models = []
185
  if os.path.isdir(MODELS_DIR):
186
- for f in sorted(os.listdir(MODELS_DIR)):
187
  if "-lm-" in f and f.endswith(".gguf"):
188
- models.append(f)
189
- return models
 
 
 
 
 
 
 
190
 
191
 
192
  def _download_lm_model(filename):
@@ -372,7 +385,10 @@ def gradio_main():
372
 
373
  actual_seed = None if seed is None or int(seed) < 0 else int(seed)
374
  adapter = None if lora_select == "None (no LoRA)" else lora_select
375
- lm_model = None if not lm_model_select or lm_model_select == "Default" else lm_model_select
 
 
 
376
 
377
  progress_map = {
378
  "lm_submit": (0.05, "Submitting LM job..."),
@@ -450,10 +466,12 @@ def gradio_main():
450
  lr = float(lr)
451
  rank = max(1, min(int(rank), 64))
452
 
453
- output_dir = os.path.join(ADAPTER_DIR, lora_name)
454
- os.makedirs(output_dir, exist_ok=True)
455
- audio_dir = os.path.join(output_dir, "audio_input")
456
  os.makedirs(audio_dir, exist_ok=True)
 
 
457
 
458
  # Copy uploaded audio files
459
  _log(f"[INFO] Preparing {len(audio_files)} audio files...")
@@ -478,7 +496,7 @@ def gradio_main():
478
  _log("[Step 1/2] Preprocessing audio...")
479
  yield _log_text(), gr.update(visible=False), gr.update(visible=True)
480
 
481
- preprocessed_dir = os.path.join(output_dir, "preprocessed_tensors")
482
 
483
  def preprocess_progress(current, total, desc):
484
  _log(f" {desc} ({current}/{total})")
@@ -514,7 +532,7 @@ def gradio_main():
514
 
515
  for msg in train_lora_generator(
516
  dataset_dir=preprocessed_dir,
517
- output_dir=output_dir,
518
  checkpoint_dir=ACE_CHECKPOINT_DIR,
519
  epochs=epochs,
520
  lr=lr,
@@ -580,10 +598,7 @@ def gradio_main():
580
 
581
  # -- Build LM model choices --
582
  def _lm_model_choices():
583
- models = _scan_lm_models()
584
- choices = ["Default"]
585
- choices.extend(models)
586
- return choices
587
 
588
  # -- Build UI --
589
  CSS = """
@@ -646,7 +661,7 @@ def gradio_main():
646
  )
647
  lm_model_select = gr.Dropdown(
648
  label="LM Model", choices=_lm_model_choices(),
649
- value="Default", scale=1,
650
  )
651
 
652
  with gr.Row(elem_classes="compact-row"):
 
179
  # LM model scanning & on-demand download
180
  # ---------------------------------------------------------------------------
181
 
182
+ AVAILABLE_LM_MODELS = [
183
+ "acestep-5Hz-lm-0.6B-Q8_0.gguf",
184
+ "acestep-5Hz-lm-1.7B-Q8_0.gguf",
185
+ "acestep-5Hz-lm-4B-Q5_K_M.gguf",
186
+ ]
187
+
188
  def _scan_lm_models():
189
+ """Return all available LM models (installed get a checkmark)."""
190
+ installed = set()
191
  if os.path.isdir(MODELS_DIR):
192
+ for f in os.listdir(MODELS_DIR):
193
  if "-lm-" in f and f.endswith(".gguf"):
194
+ installed.add(f)
195
+ choices = []
196
+ for m in AVAILABLE_LM_MODELS:
197
+ label = m.replace(".gguf", "")
198
+ if m in installed:
199
+ choices.append(m)
200
+ else:
201
+ choices.append(f"{m} (download)")
202
+ return choices
203
 
204
 
205
  def _download_lm_model(filename):
 
385
 
386
  actual_seed = None if seed is None or int(seed) < 0 else int(seed)
387
  adapter = None if lora_select == "None (no LoRA)" else lora_select
388
+ lm_model_file = lm_model_select.replace(" (download)", "") if lm_model_select else None
389
+ if lm_model_file and "(download)" in (lm_model_select or ""):
390
+ _download_lm_model(lm_model_file)
391
+ lm_model = lm_model_file
392
 
393
  progress_map = {
394
  "lm_submit": (0.05, "Submitting LM job..."),
 
466
  lr = float(lr)
467
  rank = max(1, min(int(rank), 64))
468
 
469
+ work_dir = os.path.join(OUTPUT_DIR, "train_workspace", lora_name)
470
+ os.makedirs(work_dir, exist_ok=True)
471
+ audio_dir = os.path.join(work_dir, "audio_input")
472
  os.makedirs(audio_dir, exist_ok=True)
473
+ adapter_out = os.path.join(ADAPTER_DIR, lora_name)
474
+ os.makedirs(adapter_out, exist_ok=True)
475
 
476
  # Copy uploaded audio files
477
  _log(f"[INFO] Preparing {len(audio_files)} audio files...")
 
496
  _log("[Step 1/2] Preprocessing audio...")
497
  yield _log_text(), gr.update(visible=False), gr.update(visible=True)
498
 
499
+ preprocessed_dir = os.path.join(work_dir, "preprocessed_tensors")
500
 
501
  def preprocess_progress(current, total, desc):
502
  _log(f" {desc} ({current}/{total})")
 
532
 
533
  for msg in train_lora_generator(
534
  dataset_dir=preprocessed_dir,
535
+ output_dir=adapter_out,
536
  checkpoint_dir=ACE_CHECKPOINT_DIR,
537
  epochs=epochs,
538
  lr=lr,
 
598
 
599
  # -- Build LM model choices --
600
  def _lm_model_choices():
601
+ return _scan_lm_models()
 
 
 
602
 
603
  # -- Build UI --
604
  CSS = """
 
661
  )
662
  lm_model_select = gr.Dropdown(
663
  label="LM Model", choices=_lm_model_choices(),
664
+ value=_lm_model_choices()[0] if _lm_model_choices() else None, scale=1,
665
  )
666
 
667
  with gr.Row(elem_classes="compact-row"):
train_engine.py CHANGED
@@ -1316,18 +1316,17 @@ def train_lora_generator(
1316
  unload_models(model)
1317
  return
1318
 
1319
- # Final save
1320
- final_path = str(out_path / "final")
1321
  model.decoder.eval()
1322
- save_lora_adapter(model, final_path)
1323
 
1324
  final_loss = avg_epoch_loss if num_updates > 0 else 0.0
1325
  best_note = ""
1326
  if best_epoch > 0 and Path(out_path / "best").exists():
1327
  best_note = f"\n Best: {out_path / 'best'} (epoch {best_epoch}, loss: {best_loss:.4f})"
1328
  yield (
1329
- f"[OK] Training complete! LoRA saved to {final_path}{best_note}\n"
1330
- f" For inference, set your LoRA path to: {final_path}"
1331
  )
1332
  yield "[DONE]"
1333
  unload_models(model)
 
1316
  unload_models(model)
1317
  return
1318
 
1319
+ # Final save (directly to output_dir, not a subdirectory)
 
1320
  model.decoder.eval()
1321
+ save_lora_adapter(model, str(out_path))
1322
 
1323
  final_loss = avg_epoch_loss if num_updates > 0 else 0.0
1324
  best_note = ""
1325
  if best_epoch > 0 and Path(out_path / "best").exists():
1326
  best_note = f"\n Best: {out_path / 'best'} (epoch {best_epoch}, loss: {best_loss:.4f})"
1327
  yield (
1328
+ f"[OK] Training complete! LoRA saved to {out_path}{best_note}\n"
1329
+ f" Adapter ready for inference."
1330
  )
1331
  yield "[DONE]"
1332
  unload_models(model)