# Hugging Face Spaces app: Gradio chat UI backed by the Mistral API, with an
# MCP stdio client. (Original page header residue: "Spaces: Runtime error".)
# Standard library
import asyncio
import os
from contextlib import AsyncExitStack

# Third-party
import gradio as gr
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from openai import OpenAI
# Mistral API key, read from the environment (None if unset).
cle_api = os.environ.get("CLE_API_MISTRAL")
# Mistral exposes an OpenAI-compatible endpoint, so the OpenAI SDK client can
# talk to it by pointing base_url at Mistral's API.
client = OpenAI(api_key=cle_api, base_url="https://api.mistral.ai/v1")
# Chatbot callback wired to Mistral (OpenAI-compatible API).
def chatbot(message, history):
    """Send the conversation to Mistral and append the reply to the history.

    Args:
        message: The new user message from the textbox.
        history: List of (user_msg, bot_msg) tuples held by the gr.Chatbot.

    Returns:
        The updated history twice, matching the two outputs declared in
        ``msg.submit`` (both bound to the same Chatbot component).
    """
    # Rebuild the conversation in the OpenAI/Mistral message format.
    messages = []
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    # Call the Mistral chat-completions endpoint.
    response = client.chat.completions.create(
        model="mistral-small-latest",
        messages=messages,
    )
    bot_reply = response.choices[0].message.content.strip()

    # BUG FIX: store the raw texts in the history. The previous code prepended
    # "Vous: " / "Bot: " labels, and those decorated strings were then replayed
    # verbatim to the model on every subsequent turn, polluting the API
    # context. The Chatbot widget already renders the two sides distinctly, so
    # no prefix is needed.
    history.append((message, bot_reply))
    return history, history
# Dedicated asyncio event loop, installed as the current loop so the
# synchronous MCPClientWrapper.connect() wrapper can drive async code via
# loop.run_until_complete().
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
class MCPClientWrapper:
    """Synchronous facade over an async MCP stdio client session."""

    def __init__(self):
        # Live ClientSession once connected; None before that.
        self.session = None
        # AsyncExitStack owning the subprocess transport and the session.
        self.exit_stack = None
        # Cached tool catalogue reported by the server.
        self.tools = []

    def connect(self, server_path: str) -> str:
        """Blocking connect: drives the async handshake on the module loop."""
        return loop.run_until_complete(self._connect(server_path))

    async def _connect(self, server_path: str) -> str:
        # Tear down any previous connection before opening a new one.
        if self.exit_stack:
            await self.exit_stack.aclose()
        self.exit_stack = AsyncExitStack()

        # Pick the interpreter from the server script's extension.
        runner = "python" if server_path.endswith('.py') else "node"
        params = StdioServerParameters(
            command=runner,
            args=[server_path],
            env={"PYTHONIOENCODING": "utf-8", "PYTHONUNBUFFERED": "1"},
        )

        # Spawn the server subprocess and open an MCP session over its stdio.
        self.stdio, self.write = await self.exit_stack.enter_async_context(
            stdio_client(params)
        )
        self.session = await self.exit_stack.enter_async_context(
            ClientSession(self.stdio, self.write)
        )
        await self.session.initialize()

        # Cache the server's tool catalogue for later lookups.
        listed = await self.session.list_tools()
        self.tools = [
            {
                "name": t.name,
                "description": t.description,
                "input_schema": t.inputSchema,
            }
            for t in listed.tools
        ]
        tool_names = [entry["name"] for entry in self.tools]
        return f"Connected to MCP server. Available tools: {', '.join(tool_names)}"
# BUG FIX: bind the MCP wrapper to its own name. The original code assigned it
# to `client`, shadowing the module-level OpenAI client, so every chat turn
# crashed (MCPClientWrapper has no `.chat` attribute) — the likely cause of
# the Spaces "Runtime error".
mcp_client = MCPClientWrapper()

with gr.Blocks() as demo:
    # Best-effort MCP connection: if the server script is missing or fails to
    # start, report the error but still bring the chat UI up.
    try:
        mcp_client.connect("mcp_server.py")
        print("Connected to MCP server. Available tools: "
              + ", ".join(tool["name"] for tool in mcp_client.tools))
    except Exception as exc:  # top-level boundary: log and continue
        print(f"MCP connection failed: {exc}")

    chatbot_ui = gr.Chatbot(label="ChatBot")
    msg = gr.Textbox(placeholder="Écrivez un message...")
    # Both outputs point at the same Chatbot component, matching the
    # (history, history) pair returned by chatbot().
    msg.submit(chatbot, [msg, chatbot_ui], [chatbot_ui, chatbot_ui])

demo.launch(debug=True)