Eddyhzd commited on
Commit
81d655a
·
1 Parent(s): 54fe756
Files changed (1) hide show
  1. app.py +123 -69
app.py CHANGED
@@ -1,93 +1,147 @@
1
- import gradio as gr
2
- from openai import OpenAI
3
- import os
4
- from mcp import ClientSession, StdioServerParameters
5
- from mcp.client.stdio import stdio_client
6
  import asyncio
 
7
  from contextlib import AsyncExitStack
8
 
9
- cle_api = os.environ.get("CLE_API_MISTRAL")
10
-
11
- # Initialisation du client Mistral (API compatible OpenAI)
12
- clientLLM = OpenAI(api_key=cle_api, base_url="https://api.mistral.ai/v1")
13
 
 
 
14
 
15
- loop = asyncio.new_event_loop()
16
- asyncio.set_event_loop(loop)
17
 
18
- class MCPClientWrapper:
19
  def __init__(self):
20
- self.session = None
21
- self.exit_stack = None
22
- self.tools = []
23
-
24
- def connect(self, server_path: str) -> str:
25
- return loop.run_until_complete(self._connect(server_path))
26
-
27
- async def _connect(self, server_path: str) -> str:
28
- if self.exit_stack:
29
- await self.exit_stack.aclose()
30
-
31
  self.exit_stack = AsyncExitStack()
 
 
 
 
32
 
33
- is_python = server_path.endswith('.py')
 
 
 
 
 
 
 
34
  command = "python" if is_python else "node"
35
-
36
  server_params = StdioServerParameters(
37
  command=command,
38
- args=[server_path],
39
- env={"PYTHONIOENCODING": "utf-8", "PYTHONUNBUFFERED": "1"}
40
  )
41
 
42
  stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
43
  self.stdio, self.write = stdio_transport
44
-
45
  self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))
 
46
  await self.session.initialize()
47
 
 
48
  response = await self.session.list_tools()
49
- self.tools = [{
50
- "type": "function",
51
- "function":{
52
- "name": tool.name,
53
- "description": tool.description,
54
- "input_schema": tool.inputSchema
55
- }
56
- } for tool in response.tools]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
57
 
58
- tool_names = [tool['function']["name"] for tool in self.tools]
59
- return f"Connected to MCP server. Available tools: {', '.join(tool_names)}"
60
-
61
- clientMCP = MCPClientWrapper()
62
- clientMCP.connect("mcp_server.py")
63
- print(clientMCP.tools)
64
-
65
- # Chatbot : simple écho Fonction chatbot reliée à Mistral
66
- def chatbot(message, history):
67
- # Préparer l’historique dans le format de Mistral
68
- messages = []
69
- for user_msg, bot_msg in history:
70
- messages.append({"role": "user", "content": user_msg})
71
- messages.append({"role": "assistant", "content": bot_msg})
72
 
73
- messages.append({"role": "user", "content": message})
74
-
75
- # Appel API Mistral
76
- response = clientLLM.chat.completions.create(
77
- model="mistral-small-latest",
78
- messages=messages,
79
- tools=clientMCP.tools
80
- )
81
-
82
- bot_reply = response.choices[0].message.content.strip()
83
- history.append(("Vous: " + message, "Bot: " + bot_reply))
84
- return history, history
85
-
86
- with gr.Blocks() as demo:
87
-
88
- chatbot_ui = gr.Chatbot(label="ChatBot")
89
- msg = gr.Textbox(placeholder="Écrivez un message...")
90
 
91
- msg.submit(chatbot, [msg, chatbot_ui], [chatbot_ui, chatbot_ui])
 
 
 
 
 
 
 
 
 
 
92
 
93
- demo.launch(debug=True)
 
 
 
 
 
 
 
 
1
import asyncio
import sys
from contextlib import AsyncExitStack
from typing import Optional

from anthropic import Anthropic
from dotenv import load_dotenv
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

load_dotenv()  # load environment variables from .env
 
12
 
13
class MCPClient:
    """Interactive client bridging a local MCP tool server and Claude.

    Spawns the server as a subprocess over stdio, advertises its tools to
    the Anthropic Messages API, and executes any tool calls Claude requests
    until a final text answer is produced.
    """

    def __init__(self):
        # Session is established lazily in connect_to_server().
        self.session: Optional[ClientSession] = None
        self.exit_stack = AsyncExitStack()
        # Reads ANTHROPIC_API_KEY from the environment (loaded via .env).
        self.anthropic = Anthropic()

    async def connect_to_server(self, server_script_path: str):
        """Connect to an MCP server.

        Args:
            server_script_path: Path to the server script (.py or .js)

        Raises:
            ValueError: If the script is neither a .py nor a .js file.
        """
        is_python = server_script_path.endswith('.py')
        is_js = server_script_path.endswith('.js')
        if not (is_python or is_js):
            raise ValueError("Server script must be a .py or .js file")

        command = "python" if is_python else "node"
        server_params = StdioServerParameters(
            command=command,
            args=[server_script_path],
            env=None
        )

        stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
        self.stdio, self.write = stdio_transport
        self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))

        await self.session.initialize()

        # List available tools
        response = await self.session.list_tools()
        tools = response.tools
        print("\nConnected to server with tools:", [tool.name for tool in tools])

    async def process_query(self, query: str) -> str:
        """Process a query using Claude and available tools.

        Runs an agentic loop: while Claude emits tool_use blocks, each tool
        is executed against the MCP session and its result is returned to
        Claude as a tool_result content block; the loop ends when Claude
        replies with text only.
        """
        messages = [
            {
                "role": "user",
                "content": query
            }
        ]

        response = await self.session.list_tools()
        available_tools = [{
            "name": tool.name,
            "description": tool.description,
            "input_schema": tool.inputSchema
        } for tool in response.tools]

        final_text = []

        # Initial Claude API call
        response = self.anthropic.messages.create(
            model="claude-3-5-sonnet-20241022",
            max_tokens=1000,
            messages=messages,
            tools=available_tools
        )

        # Agentic loop: keep going as long as Claude requests tool calls.
        while True:
            tool_result_blocks = []
            for content in response.content:
                if content.type == 'text':
                    final_text.append(content.text)
                elif content.type == 'tool_use':
                    tool_name = content.name
                    tool_args = content.input

                    # Execute tool call against the MCP server
                    result = await self.session.call_tool(tool_name, tool_args)
                    final_text.append(f"[Calling tool {tool_name} with args {tool_args}]")

                    # BUG FIX: the result must go back as a tool_result block
                    # referencing the tool_use id, not as a plain text message.
                    tool_result_blocks.append({
                        "type": "tool_result",
                        "tool_use_id": content.id,
                        "content": result.content,
                    })

            if not tool_result_blocks:
                # No tool requests in this turn: the answer is complete.
                break

            # Echo the assistant turn verbatim, then supply the tool results.
            messages.append({"role": "assistant", "content": response.content})
            messages.append({"role": "user", "content": tool_result_blocks})

            # BUG FIX: keep tools available on follow-up turns so Claude can
            # chain multiple tool calls.
            response = self.anthropic.messages.create(
                model="claude-3-5-sonnet-20241022",
                max_tokens=1000,
                messages=messages,
                tools=available_tools,
            )

        return "\n".join(final_text)

    async def chat_loop(self):
        """Run an interactive chat loop on stdin until 'quit' is entered."""
        print("\nMCP Client Started!")
        print("Type your queries or 'quit' to exit.")

        while True:
            try:
                query = input("\nQuery: ").strip()

                if query.lower() == 'quit':
                    break

                response = await self.process_query(query)
                print("\n" + response)

            except Exception as e:
                # Best-effort REPL: report the error and keep the loop alive.
                print(f"\nError: {str(e)}")

    async def cleanup(self):
        """Clean up resources (closes the MCP session and stdio transport)."""
        await self.exit_stack.aclose()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
132
 
133
async def main():
    """CLI entry point: connect to the MCP server given on argv and chat.

    Usage: python client.py <server_script.py|server_script.js>
    """
    if len(sys.argv) < 2:
        print("Usage: python client.py mcp_server.py")
        sys.exit(1)

    client = MCPClient()
    try:
        await client.connect_to_server(sys.argv[1])
        await client.chat_loop()
    finally:
        # Always close the stdio transport, even after errors or Ctrl-C.
        await client.cleanup()


if __name__ == "__main__":
    # BUG FIX: `sys` is now imported at module top level (see imports) so
    # main() also works when this module is imported rather than run.
    asyncio.run(main())