import base64
import os
from io import BytesIO

import yaml
from dotenv import load_dotenv
from PIL import Image

from smolagents import CodeAgent, InferenceClientModel, TransformersModel, tool
from smolagents.mcp_client import MCPClient

from outage_odyssey_ui import GradioUI
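
# Environment configuration: API keys are read from a .env file via python-dotenv.
# Only HF_TOKEN is used by the cloud path below; the other provider keys are loaded
# so alternative models can be wired in later.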
load_dotenv()

MISTRAL_API_KEY = os.getenv("MISTRAL_API_KEY")
ANTHROPIC_API_KEY = os.getenv("ANTHROPIC_API_KEY")
CODESTRAL_API_KEY = os.getenv("CODESTRAL_API_KEY")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
HF_TOKEN = os.getenv("HF_TOKEN")
USE_CLOUD_MODEL = os.getenv("USE_CLOUD_MODEL", "true").lower()
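
# Choose the language model: a hosted model served through the Hugging Face
# Inference API when USE_CLOUD_MODEL is "true" (the default), otherwise a
# locally loaded Transformers model.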
if USE_CLOUD_MODEL == "true":
    model = InferenceClientModel(
        model_id="deepseek-ai/DeepSeek-V3-0324",
        provider="hyperbolic",
        api_key=HF_TOKEN,
    )
    model_description = "This agent uses MCP tools and a hosted DeepSeek-V3 model via InferenceClientModel."
    print(model_description)
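
    # The other provider keys loaded above (Mistral, Anthropic, OpenAI, Gemini) are not
    # wired in yet. A minimal sketch of how one could be used instead, via smolagents'
    # LiteLLMModel (the model_id shown is illustrative, not verified):
    #
    #   from smolagents import LiteLLMModel
    #   model = LiteLLMModel(
    #       model_id="anthropic/claude-3-5-sonnet-latest",
    #       api_key=ANTHROPIC_API_KEY,
    #   )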
else:
    print("Loading local Qwen model...")
    model = TransformersModel(
        model_id="Qwen/Qwen3-4B",
        device_map="auto",
        max_new_tokens=8192,
        trust_remote_code=True,
    )
    print("Local model loaded successfully.")
    model_description = "This agent uses MCP tools and a locally-run Qwen3-4B model."


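# Helper tool exposed to the agent. smolagents' @tool decorator builds the tool's
# schema from the function's type hints and its docstring (including the Args section).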
@tool
def pil_to_base64(pil_image: Image.Image) -> str:
    """
    Converts a PIL Image object to a base64-encoded PNG data URL.

    This tool takes a PIL Image object and encodes it into a base64 string
    formatted as a data URL, which can be used in HTML or other contexts that
    support embedded images.

    Args:
        pil_image (PIL.Image.Image): A PIL Image object to be converted.

    Returns:
        str: A string representing the image in base64 format, prefixed with the MIME type.
        The format is: 'data:image/png;base64,<base64_string>'

    Example:
        >>> pil_to_base64(Image.open('example.png'))
        'data:image/png;base64,iVBORw0KGgoAAAANSUh...'
    """
    buffer = BytesIO()
    pil_image.save(buffer, format="PNG")
    img_str = base64.b64encode(buffer.getvalue()).decode()
    return f"data:image/png;base64,{img_str}"


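# Connect to the MCP server and expose its tools to the agent. This assumes an MCP
# server is already running locally and serving SSE at http://localhost:8000/sse.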
mcp_client = None
try:
    mcp_client = MCPClient({"url": "http://localhost:8000/sse"})
    tools = mcp_client.get_tools()

    tools_array = [{
        "name": t.name,
        "description": t.description,
        "inputs": t.inputs,
        "output_type": t.output_type,
        "is_initialized": t.is_initialized,
    } for t in tools]

    tool_names = [t["name"] for t in tools_array]
    print(f"Connected to MCP server. Available tools: {', '.join(tool_names)}")
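    # Load the agent's prompt templates from prompts.yml; these replace CodeAgent's defaults.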
| with open("prompts.yml", 'r', encoding='utf-8') as stream: |
| prompt_templates = yaml.safe_load(stream) |
| |
| |
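    # Build the CodeAgent with the local helper tool plus every MCP tool.
    # planning_interval=5 triggers a re-planning step every five actions, and
    # additional_authorized_imports whitelists the modules its generated code may import.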
    agent = CodeAgent(
        tools=[pil_to_base64, *tools],
        model=model,
        prompt_templates=prompt_templates,
        max_steps=10,
        planning_interval=5,
        additional_authorized_imports=[
            'time', 'math', 'queue', 're', 'stat', 'collections', 'datetime',
            'statistics', 'itertools', 'unicodedata', 'random', 'matplotlib.pyplot',
            'pandas', 'numpy', 'json', 'yaml', 'plotly', 'PIL', 'base64', 'io',
        ],
    )
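    # Launch the Gradio front end. GradioUI is a project-local wrapper, so the launch
    # kwargs (including mcp_server=True, which would also expose the app over MCP) are
    # presumably forwarded to Gradio's own launch().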
| agent.name = "Outage Odyssey Agent" |
| GradioUI(agent=agent, file_upload_folder="uploaded_data").launch(server_name="0.0.0.0", server_port=7860,share=False,mcp_server=True) |
except Exception as e:
    print(f"Error starting the agent: {e}")
finally:
    # Disconnect only if the MCP client was actually created.
    if mcp_client is not None:
        mcp_client.disconnect()
        print("MCP client disconnected")