# Hugging Face Space app — Gradio chatbot with Langfuse tracing.
# (Replaced page-scrape residue: the "Spaces: Sleeping" status banner.)
import os
import base64

import gradio as gr
from huggingface_hub import InferenceClient  # public API; avoid the private _client module
from smolagents import CodeAgent, HfApiModel

# --- Langfuse configuration ----------------------------------------------
# Credentials come from the environment; LANGFUSE_HOST falls back to the
# hosted Langfuse cloud endpoint.
LANGFUSE_PUBLIC_KEY = os.environ.get("LANGFUSE_PUBLIC_KEY")
LANGFUSE_SECRET_KEY = os.environ.get("LANGFUSE_SECRET_KEY")
LANGFUSE_HOST = os.environ.get("LANGFUSE_HOST", "https://cloud.langfuse.com")
HF_TOKEN = os.environ.get("HF_TOKEN")

# OTLP uses HTTP Basic auth: base64("public_key:secret_key").
LANGFUSE_AUTH = base64.b64encode(
    f"{LANGFUSE_PUBLIC_KEY}:{LANGFUSE_SECRET_KEY}".encode()
).decode()

# NOTE: these must be set BEFORE the OTLP exporter is constructed below —
# the exporter reads them at instantiation time.
os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = f"{LANGFUSE_HOST}/api/public/otel"
os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Basic {LANGFUSE_AUTH}"
# --- Telemetry wiring -----------------------------------------------------
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from openinference.instrumentation.smolagents import SmolagentsInstrumentor

# Ship every finished span synchronously to the OTLP endpoint configured via
# the OTEL_* environment variables set above.
provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(OTLPSpanExporter()))
trace.set_tracer_provider(provider)

# Auto-instrument smolagents so each agent.run() call emits a full trace.
SmolagentsInstrumentor().instrument(tracer_provider=provider)
# --- Agent setup ----------------------------------------------------------
# `client` is not used elsewhere in this module; kept so any external code
# importing it keeps working. TODO(review): remove if nothing imports it.
client = InferenceClient(token=HF_TOKEN)

# HfApiModel's keyword is `model_id` (not `model`); pass the HF token so the
# Inference API requests are authenticated instead of anonymous.
model = HfApiModel(model_id="Qwen/Qwen2.5-Coder-32B-Instruct", token=HF_TOKEN)
agent = CodeAgent(tools=[], model=model)
def add_user_message(prompt, history):
    """Record the user's prompt as a "user" turn in the chat history.

    Mutates *history* in place and returns the same list twice: one output
    feeds the Chatbot component, the other refreshes the gr.State mirror.
    """
    history += [{"role": "user", "content": prompt}]
    return history, history
def get_response(history):
    """Run the agent on the newest user message and append its reply.

    The last entry of *history* is taken to be the pending user prompt; the
    agent's answer is stringified and appended as an "assistant" turn.
    """
    latest_prompt = history[-1]["content"]
    answer = str(agent.run(latest_prompt))
    history.append({"role": "assistant", "content": answer})
    return history
# --- UI -------------------------------------------------------------------
with gr.Blocks() as demo:
    gr.Markdown(
        "## 💬 SmolAgent (Tracing powered by Langfuse)\n"
        "#### 🔍 Memory-enabled chatbot using Qwen2.5 + full telemetry via Langfuse."
    )

    chatbot = gr.Chatbot(type="messages", height=600)
    state = gr.State([])

    with gr.Row():
        txt = gr.Textbox(placeholder="Ask something...", show_label=False)

    # On submit: record the user turn first, then produce the agent's reply.
    txt.submit(
        add_user_message, [txt, state], [chatbot, state], queue=False
    ).then(get_response, [state], [chatbot])
    # Clear the textbox as soon as the message is submitted.
    txt.submit(lambda: "", None, txt, queue=False)

if __name__ == "__main__":
    demo.launch()