Spaces: Running on Zero
Commit: update api server
File changed: acestep/api_server.py (+1 line, -1 line)
|
@@ -535,10 +535,10 @@ def create_app() -> FastAPI:
 
         if sample_mode:
             print("[api_server] Sample mode: generating random caption/lyrics via LM")
+            # Note: understand_audio_from_codes does not support cfg_scale or negative_prompt
             sample_metadata, sample_status = llm.understand_audio_from_codes(
                 audio_codes="NO USER INPUT",
                 temperature=req.lm_temperature,
-                negative_prompt=req.lm_negative_prompt,
                 top_k=lm_top_k if lm_top_k > 0 else None,
                 top_p=lm_top_p if lm_top_p < 1.0 else None,
                 repetition_penalty=req.lm_repetition_penalty,