Spaces:
Runtime error
Runtime error
| import gradio as gr | |
| import os | |
| import asyncio | |
| from anthropic import Anthropic | |
| from mcp import ClientSession, StdioServerParameters | |
| from mcp.client.stdio import stdio_client | |
| from contextlib import AsyncExitStack | |
# --- Anthropic API key ---
# Read the key from a custom environment variable. It must be handed to the
# client explicitly: the SDK's default lookup only checks ANTHROPIC_API_KEY,
# not CLE_API_ANTHROPIC.
cle_api = os.environ.get("CLE_API_ANTHROPIC")

# Initialize the Anthropic client.
# Bug fix: `cle_api` was previously read but never used, so authentication
# silently fell back to ANTHROPIC_API_KEY and failed when only
# CLE_API_ANTHROPIC was set.
clientLLM = Anthropic(api_key=cle_api) if cle_api else Anthropic()

# --- Event loop ---
# A dedicated loop lets the synchronous Gradio callbacks drive the async
# MCP client via loop.run_until_complete(...).
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
# --- MCP wrapper ---
class MCPClientWrapper:
    """Thin synchronous wrapper around an async MCP stdio client session.

    Owns the transport and session lifetimes through an AsyncExitStack and
    exposes a blocking `connect()` that drives the module-level event loop.
    """

    def __init__(self):
        self.session = None      # active ClientSession, or None when disconnected
        self.exit_stack = None   # AsyncExitStack owning transport + session
        self.tools = []          # tool descriptors in Anthropic `tools` format

    def connect(self, server_path: str) -> str:
        """Connect synchronously to the MCP server script at `server_path`.

        Returns a human-readable status string listing the available tools.
        """
        return loop.run_until_complete(self._connect(server_path))

    async def _connect(self, server_path: str) -> str:
        # Tear down any previous connection and drop stale state before
        # opening a new one (bug fix: session/tools used to survive a
        # reconnect even after the old transport was closed).
        if self.exit_stack:
            await self.exit_stack.aclose()
            self.session = None
            self.tools = []
        self.exit_stack = AsyncExitStack()

        # Choose the interpreter from the file extension: .py -> python,
        # anything else is assumed to be a Node script.
        command = "python" if server_path.endswith(".py") else "node"
        server_params = StdioServerParameters(
            command=command,
            args=[server_path],
            env={"PYTHONIOENCODING": "utf-8", "PYTHONUNBUFFERED": "1"},
        )

        try:
            stdio_transport = await self.exit_stack.enter_async_context(
                stdio_client(server_params)
            )
            self.stdio, self.write = stdio_transport
            self.session = await self.exit_stack.enter_async_context(
                ClientSession(self.stdio, self.write)
            )
            await self.session.initialize()
            response = await self.session.list_tools()
        except Exception:
            # Bug fix: on a partial failure the transport/session (and the
            # spawned server subprocess) were leaked; close the stack so
            # nothing is left running, then re-raise for the caller.
            await self.exit_stack.aclose()
            self.exit_stack = None
            self.session = None
            raise

        # Convert MCP tool metadata into the shape the Anthropic Messages
        # API expects for its `tools` parameter.
        self.tools = [
            {
                "name": tool.name,
                "description": tool.description,
                "input_schema": tool.inputSchema,
            }
            for tool in response.tools
        ]
        tool_names = [tool["name"] for tool in self.tools]
        return f"Connected to MCP server. Available tools: {', '.join(tool_names)}"
# --- Connect to the MCP server ---
# NOTE(review): this runs at import time; if mcp_server.py is missing or
# fails to start, the app crashes before the UI launches — confirm intended.
clientMCP = MCPClientWrapper()
clientMCP.connect("mcp_server.py")
print(clientMCP.tools)
# --- Chatbot function wired to Claude ---
def chatbot(message, history):
    """Gradio callback: send `message` plus the chat `history` to Claude.

    Parameters
    ----------
    message : str
        The new user message from the textbox.
    history : list[tuple[str, str]]
        Gradio tuple-format history of (user, bot) pairs.

    Returns
    -------
    tuple
        (history, history) — the updated history, duplicated to match the
        two output slots wired up in the UI.
    """
    # Rebuild the conversation in the Anthropic Messages format,
    # alternating user/assistant turns.
    messages = []
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    # Bug fix: the original passed tools=None when no tools were loaded,
    # which the Anthropic SDK rejects (`tools` must be a list or omitted).
    # Build the kwarg conditionally instead.
    kwargs = {}
    if clientMCP.tools:
        kwargs["tools"] = clientMCP.tools

    response = clientLLM.messages.create(
        model="claude-3-5-sonnet-20240620",
        max_tokens=500,
        messages=messages,
        **kwargs,
    )

    # Collect only the text blocks of the reply.
    # NOTE(review): tool_use blocks are not executed here; if the model
    # decides to call a tool the reply may come back empty — TODO handle
    # the tool-call round trip via clientMCP.session.
    bot_reply = ""
    if response.content:
        for block in response.content:
            if block.type == "text":
                bot_reply += block.text.strip()

    history.append(("Vous: " + message, "Bot: " + bot_reply))
    return history, history
# --- Gradio UI ---
with gr.Blocks() as demo:
    chatbot_ui = gr.Chatbot(label="ChatBot")
    msg = gr.Textbox(placeholder="Écrivez un message...")
    # NOTE(review): chatbot_ui appears twice in `outputs` because chatbot()
    # returns the history twice; the textbox is never cleared after submit —
    # confirm this wiring is intended. The tuple-format history assumes the
    # Chatbot component's legacy (user, bot) message format — TODO confirm
    # against the installed Gradio version.
    msg.submit(chatbot, [msg, chatbot_ui], [chatbot_ui, chatbot_ui])
demo.launch(debug=True)