Delete main_api.py
Browse files- main_api.py +0 -117
main_api.py
DELETED
|
@@ -1,117 +0,0 @@
|
|
| 1 |
-
# ============================
|
| 2 |
-
# PATCH: main_api.py (FastAPI) – refine_narration via remote instruct when configured
|
| 3 |
-
# ============================
|
| 4 |
-
from __future__ import annotations
|
| 5 |
-
from fastapi import FastAPI, UploadFile, File, Form
|
| 6 |
-
from fastapi.responses import JSONResponse
|
| 7 |
-
from fastapi.middleware.cors import CORSMiddleware
|
| 8 |
-
from pathlib import Path
|
| 9 |
-
import shutil
|
| 10 |
-
import uvicorn
|
| 11 |
-
import json
|
| 12 |
-
|
| 13 |
-
from video_processing import process_video_pipeline
|
| 14 |
-
from casting_loader import ensure_chroma, build_faces_index, build_voices_index
|
| 15 |
-
from narration_system import NarrationSystem
|
| 16 |
-
from llm_router import load_yaml, LLMRouter
|
| 17 |
-
|
| 18 |
-
# FastAPI application instance for the Veureu engine.
app = FastAPI(title="Veureu Engine API", version="0.2.0")

# Allow browser clients from any origin to call the API (permissive CORS).
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Scratch directory for uploaded videos; created eagerly at import time.
ROOT = Path("/tmp/veureu")
ROOT.mkdir(parents=True, exist_ok=True)
-
|
| 30 |
-
@app.get("/")
def root():
    """Health-check endpoint: reports that the service is alive."""
    payload = {"ok": True, "service": "veureu-engine"}
    return payload
| 33 |
-
|
| 34 |
-
|
| 35 |
-
@app.post("/process_video")
async def process_video(
    video_file: UploadFile = File(...),
    config_path: str = Form("config.yaml"),
    out_root: str = Form("results"),
    db_dir: str = Form("chroma_db"),
):
    """Persist the uploaded video to scratch space and run the full pipeline.

    The uploaded file is copied under ROOT, then handed to
    ``process_video_pipeline``; its result dict is returned as JSON.
    """
    # Security: keep only the basename of the client-supplied filename so a
    # crafted name like "../../etc/cron.d/x" cannot escape the scratch dir.
    safe_name = Path(video_file.filename or "upload.bin").name
    tmp_video = ROOT / safe_name
    with tmp_video.open("wb") as f:
        shutil.copyfileobj(video_file.file, f)

    result = process_video_pipeline(
        str(tmp_video), config_path=config_path, out_root=out_root, db_dir=db_dir
    )
    return JSONResponse(result)
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
@app.post("/load_casting")
async def load_casting(
    faces_dir: str = Form("identities/faces"),
    voices_dir: str = Form("identities/voices"),
    db_dir: str = Form("chroma_db"),
    drop_collections: bool = Form(False),
):
    """(Re)build the face and voice identity indexes in the Chroma store."""
    chroma_client = ensure_chroma(Path(db_dir))
    face_count = build_faces_index(
        Path(faces_dir),
        chroma_client,
        collection_name="index_faces",
        drop=drop_collections,
    )
    voice_count = build_voices_index(
        Path(voices_dir),
        chroma_client,
        collection_name="index_voices",
        drop=drop_collections,
    )
    return {"ok": True, "faces": face_count, "voices": voice_count}
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
def _local_refine(cfg: dict, dialogues_srt: str, frames: list) -> dict:
    """Run the local NarrationSystem and shape its result as a response dict."""
    ns = NarrationSystem(
        model_url=None,
        une_guidelines_path=cfg.get("narration", {}).get("une_guidelines_path", "UNE_153010.txt"),
    )
    res = ns.run(dialogues_srt, frames)
    return {
        "narrative_text": res.narrative_text,
        "srt_text": res.srt_text,
        "approved": res.approved,
        "critic_feedback": res.critic_feedback,
    }


@app.post("/refine_narration")
async def refine_narration(
    dialogues_srt: str = Form(...),
    frame_descriptions_json: str = Form("[]"),
    config_path: str = Form("config.yaml"),
):
    """Fuse SRT dialogues with frame descriptions into an audio description.

    Uses the remote instruct model when the config routes the narration model
    remotely (models.routing.use_remote_for); otherwise — or on any remote
    failure — falls back to the local NarrationSystem.
    """
    cfg = load_yaml(config_path)
    frames = json.loads(frame_descriptions_json)

    # Decide routing: the remote path is taken only when the configured
    # narration model is explicitly listed for remote use.
    model_name = cfg.get("narration", {}).get("model", "salamandra-instruct")
    remote_models = cfg.get("models", {}).get("routing", {}).get("use_remote_for", [])
    if model_name not in remote_models:
        # Local path (uses the existing NarrationSystem).
        return _local_refine(cfg, dialogues_srt, frames)

    router = LLMRouter(cfg)
    # Simplified refinement via the remote instruct model; keep the prompt
    # logic aligned with NarrationSystem if 1:1 parity is desired.
    system_msg = (
        "Eres un sistema de audiodescripción que cumple UNE-153010. "
        "Fusiona diálogos del SRT con descripciones concisas en los huecos, evitando redundancias. "
        "Devuelve JSON con {narrative_text, srt_text}."
    )
    prompt = json.dumps(
        {"dialogues_srt": dialogues_srt, "frames": frames, "rules": cfg.get("narration", {})},
        ensure_ascii=False,
    )
    try:
        txt = router.instruct(prompt=prompt, system=system_msg, model=model_name)
    except Exception:
        # Remote call failed: fall back to the local NarrationSystem.
        return _local_refine(cfg, dialogues_srt, frames)

    try:
        out = json.loads(txt)
    except Exception:
        out = None
    if not isinstance(out, dict):
        # Model did not return a JSON object; treat raw text as the narrative.
        out = {"narrative_text": txt, "srt_text": ""}
    return {
        "narrative_text": out.get("narrative_text", ""),
        "srt_text": out.get("srt_text", ""),
        "approved": True,
        "critic_feedback": "",
    }
|
| 114 |
-
|
| 115 |
-
|
| 116 |
-
if __name__ == "__main__":
    # Dev entry point: serve the app directly on all interfaces, port 7860.
    server_kwargs = {"host": "0.0.0.0", "port": 7860}
    uvicorn.run(app, **server_kwargs)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|