Spaces:
Running
Running
Añade soporte para el cliente LangSmith en `app.py` y mejora la gestión de trazas. Se implementa la función `_flush_langsmith` para asegurar que las trazas se envían antes de la salida del proceso o entre ejecuciones. Además, se registra esta función para su ejecución al finalizar el programa, optimizando el manejo de trazas y garantizando que no se pierdan datos importantes. Se actualiza la función `respond` para llamar a `_flush_langsmith` entre solicitudes, mejorando la fiabilidad del sistema.
Browse files
app.py
CHANGED
|
@@ -1,10 +1,14 @@
|
|
| 1 |
import os
|
|
|
|
|
|
|
|
|
|
| 2 |
import base64
|
| 3 |
import mimetypes
|
| 4 |
import gradio as gr
|
| 5 |
from openai import OpenAI
|
| 6 |
from dotenv import load_dotenv
|
| 7 |
from langsmith import traceable
|
|
|
|
| 8 |
from langsmith.run_trees import RunTree
|
| 9 |
|
| 10 |
load_dotenv()
|
|
@@ -15,6 +19,35 @@ GEMINI_MODEL = "gemini-2.5-flash"
|
|
| 15 |
_api_key = os.getenv("GEMINI_API_KEY")
|
| 16 |
_client = OpenAI(api_key=_api_key, base_url=GEMINI_BASE_URL) if _api_key else None
|
| 17 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 18 |
system_prompt = """
|
| 19 |
Eres un asistente experto que guía a personas no técnicas para crear:
|
| 20 |
- Credenciales de Gmail (Google Cloud) o
|
|
@@ -294,6 +327,8 @@ def respond(message, history: list[tuple[str, str]]):
|
|
| 294 |
pipeline.patch()
|
| 295 |
except Exception:
|
| 296 |
pass
|
|
|
|
|
|
|
| 297 |
except Exception as e:
|
| 298 |
if child_llm:
|
| 299 |
try:
|
|
@@ -308,6 +343,7 @@ def respond(message, history: list[tuple[str, str]]):
|
|
| 308 |
except Exception:
|
| 309 |
pass
|
| 310 |
yield f"Ocurrió un error al llamar a Gemini: {e}"
|
|
|
|
| 311 |
|
| 312 |
|
| 313 |
chat = gr.ChatInterface(
|
|
|
|
| 1 |
import os
|
| 2 |
+
import atexit
|
| 3 |
+
import asyncio
|
| 4 |
+
import inspect
|
| 5 |
import base64
|
| 6 |
import mimetypes
|
| 7 |
import gradio as gr
|
| 8 |
from openai import OpenAI
|
| 9 |
from dotenv import load_dotenv
|
| 10 |
from langsmith import traceable
|
| 11 |
+
from langsmith import Client as LangSmithClient
|
| 12 |
from langsmith.run_trees import RunTree
|
| 13 |
|
| 14 |
load_dotenv()
|
|
|
|
| 19 |
_api_key = os.getenv("GEMINI_API_KEY")
|
| 20 |
_client = OpenAI(api_key=_api_key, base_url=GEMINI_BASE_URL) if _api_key else None
|
| 21 |
|
| 22 |
+
# Optional LangSmith client for guaranteed flush
|
| 23 |
+
_ls_api_key_env = os.getenv("LANGSMITH_API_KEY")
|
| 24 |
+
_ls_client = LangSmithClient() if _ls_api_key_env else None
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def _flush_langsmith():
    """Ensure LangSmith traces are sent before process exit or between runs.

    Best-effort by design: any failure while flushing is swallowed so that
    tracing problems can never break the chat app itself. No-op when the
    LangSmith client is not configured (no LANGSMITH_API_KEY).
    """
    if not _ls_client:
        return
    try:
        result = _ls_client.flush()
        # Some client versions may return an awaitable; drive it to completion.
        if inspect.isawaitable(result):
            try:
                asyncio.run(result)
            except RuntimeError:
                # asyncio.run raised because an event loop is already running
                # (e.g. inside an async server). get_running_loop() is the
                # correct call here — get_event_loop() is deprecated for this
                # use and can itself fail on modern Python.
                asyncio.get_running_loop().create_task(result)
    except Exception:
        # Best-effort flush; do not break the app.
        pass
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
# Flush any traces still buffered in the LangSmith client when the
# interpreter shuts down (only when the client is configured).
if _ls_client:
    try:
        atexit.register(_flush_langsmith)
    except Exception:
        pass  # best-effort: never let tracing setup block app startup
|
| 50 |
+
|
| 51 |
system_prompt = """
|
| 52 |
Eres un asistente experto que guía a personas no técnicas para crear:
|
| 53 |
- Credenciales de Gmail (Google Cloud) o
|
|
|
|
| 327 |
pipeline.patch()
|
| 328 |
except Exception:
|
| 329 |
pass
|
| 330 |
+
# Ensure traces are flushed between requests
|
| 331 |
+
_flush_langsmith()
|
| 332 |
except Exception as e:
|
| 333 |
if child_llm:
|
| 334 |
try:
|
|
|
|
| 343 |
except Exception:
|
| 344 |
pass
|
| 345 |
yield f"Ocurrió un error al llamar a Gemini: {e}"
|
| 346 |
+
_flush_langsmith()
|
| 347 |
|
| 348 |
|
| 349 |
chat = gr.ChatInterface(
|