# conversation / app.py — Gradio Space entry point
# Author: Merlintxu — commit 35ac7e4 ("Update app.py", verified)
from __future__ import annotations

import asyncio
import logging
import tempfile
from pathlib import Path
from typing import Optional

import gradio as gr

from conversation_storyline.io import load_messages, load_messages_from_text
from conversation_storyline.pipeline import run_pipeline
from conversation_storyline.plots import (
    load_graph_json,
    load_interactions_df,
    plot_reply_sankey,
    plot_sentiment_histogram,
    plot_sentiment_over_time,
    plot_speaker_activity_heatmap,
    plot_speaker_topic_heatmap,
    plot_topic_shift_timeline,
)
def get_backend(name: str):
    """Instantiate the LLM backend selected by *name* ("openai" or "outlines").

    Backends are imported lazily so that only the chosen backend's
    dependencies need to be installed.

    Raises:
        ValueError: if *name* is not a known backend.
    """
    if name == "outlines":
        from conversation_storyline.llm_backends.outlines_backend import OutlinesBackend
        return OutlinesBackend()
    if name == "openai":
        from conversation_storyline.llm_backends.openai_backend import OpenAIBackend
        return OpenAIBackend()
    raise ValueError("backend inválido")
async def _run(file_path: Optional[str], transcript_text: str, backend: str):
    """Run the storyline pipeline on a pasted transcript or an uploaded file.

    A non-empty *transcript_text* takes precedence over *file_path*.

    Args:
        file_path: Path to an uploaded .txt/.csv file, or None.
        transcript_text: Raw transcript pasted into the textbox.
        backend: Backend name passed to :func:`get_backend`.

    Returns:
        A 12-tuple matching the Gradio ``outputs`` wiring: (png path,
        inline HTML, html file path, graph json path, interactions path,
        metrics path, 5 analytics figures, topic-shift figure). Missing
        artifacts or failed plots map to ``None``.

    Raises:
        ValueError: if neither a transcript nor a file was provided, or
            the backend name is unknown.
    """
    transcript_text = (transcript_text or "").strip()
    if transcript_text:
        msgs = load_messages_from_text(transcript_text)
    else:
        if not file_path:
            raise ValueError("Debes pegar un transcript o subir un archivo.")
        msgs = load_messages(file_path)

    b = get_backend(backend)
    # Each run writes artifacts into a fresh temp dir so concurrent
    # requests never clobber each other's output files.
    outdir = Path(tempfile.mkdtemp(prefix="storyline_"))
    await run_pipeline(msgs, b, str(outdir))

    png = outdir / "storyline.png"
    html = outdir / "storyline.html"
    graph = outdir / "graph.json"
    interactions = outdir / "interactions.jsonl"
    metrics = outdir / "metrics.parquet"
    html_inline = html.read_text(encoding="utf-8", errors="ignore") if html.exists() else None

    # Plotting is best-effort: a failure here must not hide the storyline
    # artifacts — but it should not be silent either, so log the traceback.
    figs = [None] * 5
    topic_shift_fig = None
    try:
        df = load_interactions_df(outdir)
        g = load_graph_json(outdir)
        figs = [
            plot_sentiment_over_time(df),
            plot_sentiment_histogram(df),
            plot_speaker_topic_heatmap(df),
            plot_speaker_activity_heatmap(df),
            plot_reply_sankey(g),
        ]
        topic_shift_fig = plot_topic_shift_timeline(df)
    except Exception:
        logging.getLogger(__name__).exception("analytics plotting failed")

    return (
        str(png) if png.exists() else None,
        html_inline,
        str(html) if html.exists() else None,
        str(graph) if graph.exists() else None,
        str(interactions) if interactions.exists() else None,
        str(metrics) if metrics.exists() else None,
        figs[0],
        figs[1],
        figs[2],
        figs[3],
        figs[4],
        topic_shift_fig,
    )
def run_ui(file_obj, transcript_text: str, backend: str):
    """Synchronous Gradio click handler: bridge into the async pipeline.

    Resolves the uploaded file's temp path (if any) and drives
    :func:`_run` to completion on a fresh event loop, returning its
    12-tuple of outputs unchanged.
    """
    path = None if file_obj is None else file_obj.name
    return asyncio.run(_run(path, transcript_text, backend))
# Gradio UI: inputs (file upload OR pasted transcript, backend choice) on
# top, results grouped into tabs below. The order of the `outputs` list in
# btn.click must match the 12-tuple returned by `_run` exactly.
with gr.Blocks(title="Conversation Storyline – v4") as demo:
    gr.Markdown("# Conversation Storyline – v4\nPega un transcript o sube TXT/CSV.")
    with gr.Row():
        f = gr.File(label="Upload (.txt o .csv)")
        backend = gr.Dropdown(choices=["openai", "outlines"], value="openai", label="Backend LLM")
    # Pasted text takes precedence over the uploaded file (see `_run`).
    transcript_text = gr.Textbox(label="O pega aquí el transcript", lines=10)
    btn = gr.Button("Run", variant="primary")
    with gr.Tabs():
        with gr.Tab("Storyline"):
            with gr.Row():
                out_png = gr.Image(label="Storyline (PNG)", type="filepath")
                out_story_html = gr.HTML(label="Storyline (HTML embebido)")
            out_html_file = gr.File(label="Storyline HTML (descarga)")
        with gr.Tab("Analítica"):
            out_sentiment = gr.Plot(label="Sentiment timeline")
            out_hist = gr.Plot(label="Sentiment histogram")
            out_topic_heat = gr.Plot(label="Speaker × topic heatmap")
            out_activity_heat = gr.Plot(label="Speaker activity heatmap")
            out_topic_shifts = gr.Plot(label="Topic shifts timeline")
        with gr.Tab("Grafo"):
            out_sankey = gr.Plot(label="Sankey replies")
            out_graph = gr.File(label="Graph JSON")
        with gr.Tab("Artifacts"):
            out_interactions = gr.File(label="interactions.jsonl")
            out_metrics = gr.File(label="metrics.parquet")
    # Wire the click handler; output order mirrors `_run`'s return tuple:
    # artifacts first (png, inline html, html file, graph, interactions,
    # metrics), then the five analytics figures, then the topic-shift figure.
    btn.click(
        fn=run_ui,
        inputs=[f, transcript_text, backend],
        outputs=[
            out_png,
            out_story_html,
            out_html_file,
            out_graph,
            out_interactions,
            out_metrics,
            out_sentiment,
            out_hist,
            out_topic_heat,
            out_activity_heat,
            out_sankey,
            out_topic_shifts,
        ],
    )
if __name__ == "__main__":
    demo.launch()