ASLP-lab committed on
Commit
a4df33c
·
verified ·
1 Parent(s): b3d3570

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -126,7 +126,6 @@ def _safe_load_lm(model_id: str, device: str) -> AutoModelForCausalLM:
126
  m.eval().to(device)
127
  return m
128
 
129
- @spaces.GPU
130
  def load_models(force_device: str | None = None):
131
  """本地:加载并缓存模型(无 spaces/ZeroGPU)"""
132
  global model, codec_model, asr_model, tokenizer
@@ -156,6 +155,8 @@ def load_models(force_device: str | None = None):
156
 
157
  logger.info("✅ All models loaded.")
158
 
 
 
159
  @torch.inference_mode()
160
  def inference_batch_transformers(
161
  lm: AutoModelForCausalLM,
@@ -254,7 +255,6 @@ def build_control_tags(age, gender, pitch, pitch_var, volume, speed, emo):
254
 
255
  @spaces.GPU
256
  def inference_select_best3(refined_text, instruct_text, age, gender, pitch, pitch_var, volume, speed, emo):
257
- load_models()
258
 
259
  control_tags = build_control_tags(age, gender, pitch, pitch_var, volume, speed, emo)
260
 
 
126
  m.eval().to(device)
127
  return m
128
 
 
129
  def load_models(force_device: str | None = None):
130
  """本地:加载并缓存模型(无 spaces/ZeroGPU)"""
131
  global model, codec_model, asr_model, tokenizer
 
155
 
156
  logger.info("✅ All models loaded.")
157
 
158
+ load_models()
159
+
160
  @torch.inference_mode()
161
  def inference_batch_transformers(
162
  lm: AutoModelForCausalLM,
 
255
 
256
  @spaces.GPU
257
  def inference_select_best3(refined_text, instruct_text, age, gender, pitch, pitch_var, volume, speed, emo):
 
258
 
259
  control_tags = build_control_tags(age, gender, pitch, pitch_var, volume, speed, emo)
260