serichard1 committed on
Commit
050153e
·
1 Parent(s): 5e9a195

add mistral openai fix async

Browse files
Files changed (3) hide show
  1. app.py +351 -179
  2. gradio_mcp_server.py +182 -64
  3. requirements.txt +2 -1
app.py CHANGED
@@ -1,7 +1,8 @@
1
  import asyncio
2
  import os
3
  import json
4
- from typing import List, Dict, Any, Union
 
5
  from contextlib import AsyncExitStack
6
 
7
  import gradio as gr
@@ -9,6 +10,8 @@ from gradio.components.chatbot import ChatMessage
9
  from mcp import ClientSession, StdioServerParameters
10
  from mcp.client.stdio import stdio_client
11
  from anthropic import Anthropic
 
 
12
  from dotenv import load_dotenv
13
 
14
  load_dotenv()
@@ -20,9 +23,40 @@ class MCPClientWrapper:
20
  def __init__(self):
21
  self.session = None
22
  self.exit_stack = None
23
- self.anthropic = Anthropic()
24
  self.tools = []
25
  self.connected = False
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
26
 
27
  def connect(self) -> str:
28
  return loop.run_until_complete(self._connect())
@@ -62,18 +96,77 @@ class MCPClientWrapper:
62
  self.connected = False
63
  return f"❌ Failed to connect to MCP server: {str(e)}"
64
 
65
- def process_message(self, message: str, history: List[Union[Dict[str, Any], ChatMessage]]) -> tuple:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
66
  if not self.session or not self.connected:
67
  return history + [
68
  {"role": "user", "content": message},
69
  {"role": "assistant", "content": "❌ MCP weather server is not connected. Please check the connection status above."}
70
- ], gr.Textbox(value="")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
71
 
72
- new_messages = loop.run_until_complete(self._process_query(message, history))
73
- return history + [{"role": "user", "content": message}] + new_messages, gr.Textbox(value="")
 
 
 
74
 
75
  async def _process_query(self, message: str, history: List[Union[Dict[str, Any], ChatMessage]]):
 
 
 
 
76
  claude_messages = []
 
77
  for msg in history:
78
  if isinstance(msg, ChatMessage):
79
  role, content = msg.role, msg.content
@@ -85,21 +178,64 @@ class MCPClientWrapper:
85
 
86
  claude_messages.append({"role": "user", "content": message})
87
 
88
- response = self.anthropic.messages.create(
89
- model="claude-3-5-sonnet-20241022",
90
- max_tokens=1500,
91
- messages=claude_messages,
92
- tools=self.tools
93
- )
94
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
95
  result_messages = []
96
 
97
  for content in response.content:
98
  if content.type == 'text':
99
- result_messages.append({
100
- "role": "assistant",
101
- "content": content.text
102
- })
103
 
104
  elif content.type == 'tool_use':
105
  tool_name = content.name
@@ -107,138 +243,24 @@ class MCPClientWrapper:
107
 
108
  result_messages.append({
109
  "role": "assistant",
110
- "content": f"🔧 I'll use the **{tool_name}** tool to fetch the weather data you requested.",
111
- "metadata": {
112
- "title": f"Using tool: {tool_name}",
113
- "log": f"Parameters: {json.dumps(tool_args, ensure_ascii=True)}",
114
- "status": "pending",
115
- "id": f"tool_call_{tool_name}"
116
- }
117
- })
118
-
119
- result_messages.append({
120
- "role": "assistant",
121
- "content": "```json\n" + json.dumps(tool_args, indent=2, ensure_ascii=True) + "\n```",
122
- "metadata": {
123
- "parent_id": f"tool_call_{tool_name}",
124
- "id": f"params_{tool_name}",
125
- "title": "Tool Parameters"
126
- }
127
  })
128
 
129
  result = await self.session.call_tool(tool_name, tool_args)
130
-
131
- if result_messages and "metadata" in result_messages[-2]:
132
- result_messages[-2]["metadata"]["status"] = "done"
133
-
134
  result_content = result.content
135
  if isinstance(result_content, list):
136
  result_content = "\n".join(str(item) for item in result_content)
137
 
138
- # Parse and format the weather data response
139
- try:
140
- result_json = json.loads(result_content)
141
-
142
- if isinstance(result_json, dict):
143
- if result_json.get("type") == "success":
144
- # Format successful weather data response
145
- station_code = result_json.get("station_code", "Unknown")
146
- weather_data = result_json.get("data", {})
147
-
148
- # Create a nicely formatted response
149
- formatted_response = f"## 🌤️ Weather Data for Station: {station_code}\n\n"
150
-
151
- if isinstance(weather_data, dict):
152
- # Show key weather information if available
153
- if "reports" in weather_data:
154
- reports = weather_data["reports"]
155
- if isinstance(reports, list) and len(reports) > 0:
156
- formatted_response += f"**Found {len(reports)} weather reports**\n\n"
157
- # Show first few reports as example
158
- for i, report in enumerate(reports[:3]):
159
- if isinstance(report, dict):
160
- timestamp = report.get("timestamp", "Unknown time")
161
- temperature = report.get("temperature", "N/A")
162
- humidity = report.get("humidity", "N/A")
163
- formatted_response += f"**Report {i+1}** ({timestamp}):\n"
164
- formatted_response += f"- Temperature: {temperature}\n"
165
- formatted_response += f"- Humidity: {humidity}\n\n"
166
-
167
- if len(reports) > 3:
168
- formatted_response += f"... and {len(reports) - 3} more reports\n\n"
169
-
170
- formatted_response += "**Raw Data:**\n```json\n" + json.dumps(weather_data, indent=2) + "\n```"
171
- else:
172
- formatted_response += "**Raw Data:**\n```json\n" + json.dumps(weather_data, indent=2) + "\n```"
173
-
174
- result_messages.append({
175
- "role": "assistant",
176
- "content": formatted_response,
177
- "metadata": {
178
- "title": f"Weather Data Retrieved",
179
- "status": "done",
180
- "id": f"success_result_{tool_name}"
181
- }
182
- })
183
-
184
- elif result_json.get("type") == "error":
185
- # Format error response
186
- error_msg = result_json.get("message", "Unknown error occurred")
187
- station_code = result_json.get("station_code", "Unknown")
188
-
189
- error_response = f"## ❌ Error Fetching Weather Data\n\n"
190
- error_response += f"**Station:** {station_code}\n"
191
- error_response += f"**Error:** {error_msg}\n\n"
192
- error_response += "**Suggestions:**\n"
193
- error_response += "- Check if the station code is correct\n"
194
- error_response += "- Ensure the weather API service is running on localhost:8888\n"
195
- error_response += "- Try a different station code\n"
196
-
197
- result_messages.append({
198
- "role": "assistant",
199
- "content": error_response,
200
- "metadata": {
201
- "title": "Weather API Error",
202
- "status": "error",
203
- "id": f"error_result_{tool_name}"
204
- }
205
- })
206
- else:
207
- # Unknown response format
208
- result_messages.append({
209
- "role": "assistant",
210
- "content": "```json\n" + result_content + "\n```",
211
- "metadata": {
212
- "title": "Raw Tool Response",
213
- "status": "done",
214
- "id": f"raw_result_{tool_name}"
215
- }
216
- })
217
- else:
218
- result_messages.append({
219
- "role": "assistant",
220
- "content": "```\n" + result_content + "\n```",
221
- "metadata": {
222
- "title": "Raw Tool Response",
223
- "status": "done",
224
- "id": f"raw_result_{tool_name}"
225
- }
226
- })
227
-
228
- except json.JSONDecodeError:
229
- result_messages.append({
230
- "role": "assistant",
231
- "content": "```\n" + result_content + "\n```",
232
- "metadata": {
233
- "title": "Raw Tool Response",
234
- "status": "done",
235
- "id": f"raw_result_{tool_name}"
236
- }
237
- })
238
 
239
- # Let Claude analyze and respond to the weather data
240
  claude_messages.append({"role": "user", "content": f"Tool result for {tool_name}: {result_content}"})
241
- next_response = self.anthropic.messages.create(
242
  model="claude-3-5-sonnet-20241022",
243
  max_tokens=1500,
244
  messages=claude_messages,
@@ -249,26 +271,123 @@ class MCPClientWrapper:
249
  "role": "assistant",
250
  "content": next_response.content[0].text
251
  })
252
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
253
  return result_messages
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
254
 
255
  client = MCPClientWrapper()
256
 
257
  def gradio_interface():
258
  with gr.Blocks(title="MCP LEXICON", theme=gr.themes.Soft()) as demo:
259
- gr.Markdown("# 🌤️ LEXICON CHATBOT - ask me anything")
260
  gr.Markdown(
261
- "Ask me about weather data from any weather station! I can fetch hourly reports, "
262
- "help you explore weather patterns, and answer questions about specific stations. "
263
- "Just ask naturally - for example: *'Get weather data for station ABC123'* or *'What stations are available?'*"
264
  )
265
 
266
- # Connection status (auto-updates on load)
267
- status = gr.Textbox(
268
- label="🔌 Connection Status",
269
- interactive=False,
270
- value="🔄 Connecting to weather server..."
271
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
272
 
273
  # Main chat interface
274
  chatbot = gr.Chatbot(
@@ -280,53 +399,106 @@ def gradio_interface():
280
  bubble_full_width=False
281
  )
282
 
283
- # Input row
284
- with gr.Row(equal_height=True):
285
- msg = gr.Textbox(
286
- label="💬 Ask about weather data",
287
- placeholder="e.g., 'Get weather data for station NYC001' or 'Show me available weather stations' or 'What's the latest data from station LAX123?'",
288
- scale=4
289
- )
290
- with gr.Column(scale=1):
291
- clear_btn = gr.Button("🗑️ Clear Chat", size="lg")
292
- reconnect_btn = gr.Button("🔄 Reconnect", size="lg")
 
 
 
 
293
 
294
- # Example queries
295
  with gr.Row():
 
 
 
 
 
 
296
  gr.Examples(
297
  examples=[
298
  "What weather stations are available?",
299
  "Get weather data for station ABC123",
300
- "Show me the latest hourly reports for station NYC001",
301
- "Get weather data for station LAX789 from page 2",
302
- "Fetch weather data for station CHI456 between 2024-01-01 and 2024-01-31"
303
  ],
304
- inputs=msg,
305
- label="💡 Example Queries"
306
  )
307
 
308
- # Auto-connect when the interface loads
309
  def auto_connect():
310
  return client.connect()
311
 
312
- # Event handlers
 
 
 
 
 
 
 
 
 
 
 
313
  demo.load(auto_connect, outputs=status)
314
- msg.submit(client.process_message, [msg, chatbot], [chatbot, msg])
315
- clear_btn.click(lambda: [], None, chatbot)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
316
  reconnect_btn.click(auto_connect, outputs=status)
317
 
318
  return demo
319
 
320
  if __name__ == "__main__":
321
- if not os.getenv("ANTHROPIC_API_KEY"):
322
- print("⚠️ Warning: ANTHROPIC_API_KEY not found in environment.")
323
- print("Please set it in your .env file or environment variables.")
324
- print("Example .env file content:")
325
- print("ANTHROPIC_API_KEY=your_api_key_here")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
326
 
327
- print("🚀 Starting MCP Weather Client...")
328
- print("📡 Will auto-connect to gradio_mcp_server.py")
329
- print("🌐 Weather API endpoint: http://localhost:8888/weather/stations")
330
 
331
  interface = gradio_interface()
332
  interface.launch(debug=True, share=True)
 
1
  import asyncio
2
  import os
3
  import json
4
+ import base64
5
+ from typing import List, Dict, Any, Union, Optional
6
  from contextlib import AsyncExitStack
7
 
8
  import gradio as gr
 
10
  from mcp import ClientSession, StdioServerParameters
11
  from mcp.client.stdio import stdio_client
12
  from anthropic import Anthropic
13
+ from openai import OpenAI
14
+ from mistralai.client import MistralClient
15
  from dotenv import load_dotenv
16
 
17
  load_dotenv()
 
23
  def __init__(self):
24
  self.session = None
25
  self.exit_stack = None
26
+ self.clients = {}
27
  self.tools = []
28
  self.connected = False
29
+ self.current_provider = "claude"
30
+ self._init_clients()
31
+
32
+ def _init_clients(self):
33
+ """Initialize all available LLM clients based on API keys"""
34
+ if os.getenv("ANTHROPIC_API_KEY"):
35
+ self.clients["claude"] = Anthropic()
36
+
37
+ if os.getenv("OPENAI_API_KEY"):
38
+ self.clients["openai"] = OpenAI()
39
+
40
+ if os.getenv("MISTRAL_API_KEY"):
41
+ self.clients["mistral"] = MistralClient(api_key=os.getenv("MISTRAL_API_KEY"))
42
+
43
+ if os.getenv("LLAMAINDEX_API_KEY"):
44
+ # Assuming this is for a generic API that accepts OpenAI-compatible format
45
+ self.clients["llama"] = OpenAI(
46
+ api_key=os.getenv("LLAMAINDEX_API_KEY"),
47
+ base_url="https://api.llamaindex.ai/v1" # Adjust URL as needed
48
+ )
49
+
50
+ def set_provider(self, provider: str):
51
+ """Set the current LLM provider"""
52
+ if provider in self.clients:
53
+ self.current_provider = provider
54
+ return f"✅ Switched to {provider.upper()}"
55
+ return f"❌ {provider.upper()} API key not found"
56
+
57
+ def get_available_providers(self) -> List[str]:
58
+ """Get list of available providers"""
59
+ return list(self.clients.keys())
60
 
61
  def connect(self) -> str:
62
  return loop.run_until_complete(self._connect())
 
96
  self.connected = False
97
  return f"❌ Failed to connect to MCP server: {str(e)}"
98
 
99
+ def _read_file_content(self, file_path: str) -> str:
100
+ """Read and extract text content from uploaded file"""
101
+ try:
102
+ file_extension = os.path.splitext(file_path)[1].lower()
103
+
104
+ if file_extension in ['.txt', '.md', '.py', '.js', '.html', '.css', '.json', '.xml', '.yaml', '.yml']:
105
+ with open(file_path, 'r', encoding='utf-8') as f:
106
+ return f.read()
107
+
108
+ elif file_extension == '.pdf':
109
+ try:
110
+ import PyPDF2
111
+ with open(file_path, 'rb') as f:
112
+ reader = PyPDF2.PdfReader(f)
113
+ text = ""
114
+ for page in reader.pages:
115
+ text += page.extract_text() + "\n"
116
+ return text
117
+ except ImportError:
118
+ return f"PDF file detected but PyPDF2 not installed. File name: {os.path.basename(file_path)}"
119
+
120
+ elif file_extension == '.csv':
121
+ with open(file_path, 'r', encoding='utf-8') as f:
122
+ return f.read()
123
+
124
+ else:
125
+ # Try to read as text, fallback to binary info
126
+ try:
127
+ with open(file_path, 'r', encoding='utf-8') as f:
128
+ return f.read()
129
+ except UnicodeDecodeError:
130
+ file_size = os.path.getsize(file_path)
131
+ return f"Binary file detected: {os.path.basename(file_path)} ({file_size} bytes)"
132
+
133
+ except Exception as e:
134
+ return f"Error reading file {os.path.basename(file_path)}: {str(e)}"
135
+
136
+ def process_message(self, message: str, files: Optional[List], history: List[Union[Dict[str, Any], ChatMessage]]) -> tuple:
137
  if not self.session or not self.connected:
138
  return history + [
139
  {"role": "user", "content": message},
140
  {"role": "assistant", "content": "❌ MCP weather server is not connected. Please check the connection status above."}
141
+ ], gr.Textbox(value=""), None
142
+
143
+ # Process uploaded files
144
+ file_content = ""
145
+ if files:
146
+ file_content = "\n\n--- UPLOADED FILES ---\n"
147
+ for file in files:
148
+ if hasattr(file, 'name'):
149
+ file_path = file.name
150
+ else:
151
+ file_path = file
152
+
153
+ content = self._read_file_content(file_path)
154
+ file_content += f"\n📄 File: {os.path.basename(file_path)}\n{content}\n"
155
+ file_content += "--- END FILES ---\n\n"
156
 
157
+ # Combine message with file content
158
+ full_message = file_content + message if file_content else message
159
+
160
+ new_messages = loop.run_until_complete(self._process_query(full_message, history))
161
+ return history + [{"role": "user", "content": message}] + new_messages, gr.Textbox(value=""), None
162
 
163
  async def _process_query(self, message: str, history: List[Union[Dict[str, Any], ChatMessage]]):
164
+ if self.current_provider not in self.clients:
165
+ return [{"role": "assistant", "content": f"❌ {self.current_provider.upper()} client not available"}]
166
+
167
+ client = self.clients[self.current_provider]
168
  claude_messages = []
169
+
170
  for msg in history:
171
  if isinstance(msg, ChatMessage):
172
  role, content = msg.role, msg.content
 
178
 
179
  claude_messages.append({"role": "user", "content": message})
180
 
181
+ try:
182
+ if self.current_provider == "claude":
183
+ response = client.messages.create(
184
+ model="claude-3-5-sonnet-20241022",
185
+ max_tokens=1500,
186
+ messages=claude_messages,
187
+ tools=self.tools
188
+ )
189
+ return await self._process_claude_response(response, claude_messages)
190
+
191
+ elif self.current_provider == "openai":
192
+ # Convert tools format for OpenAI
193
+ openai_tools = []
194
+ for tool in self.tools:
195
+ openai_tools.append({
196
+ "type": "function",
197
+ "function": {
198
+ "name": tool["name"],
199
+ "description": tool["description"],
200
+ "parameters": tool["input_schema"]
201
+ }
202
+ })
203
+
204
+ response = client.chat.completions.create(
205
+ model="gpt-4-turbo-preview",
206
+ max_tokens=1500,
207
+ messages=claude_messages,
208
+ tools=openai_tools if openai_tools else None
209
+ )
210
+ return await self._process_openai_response(response, claude_messages)
211
+
212
+ elif self.current_provider == "mistral":
213
+ response = client.chat(
214
+ model="mistral-large-latest",
215
+ max_tokens=1500,
216
+ messages=claude_messages,
217
+ tools=self.tools if self.tools else None
218
+ )
219
+ return await self._process_mistral_response(response, claude_messages)
220
+
221
+ elif self.current_provider == "llama":
222
+ response = client.chat.completions.create(
223
+ model="llama-2-70b-chat", # Adjust model name as needed
224
+ max_tokens=1500,
225
+ messages=claude_messages,
226
+ tools=self.tools if self.tools else None
227
+ )
228
+ return await self._process_openai_response(response, claude_messages) # Same format as OpenAI
229
+
230
+ except Exception as e:
231
+ return [{"role": "assistant", "content": f"❌ Error with {self.current_provider}: {str(e)}"}]
232
+
233
+ async def _process_claude_response(self, response, claude_messages):
234
  result_messages = []
235
 
236
  for content in response.content:
237
  if content.type == 'text':
238
+ result_messages.append({"role": "assistant", "content": content.text})
 
 
 
239
 
240
  elif content.type == 'tool_use':
241
  tool_name = content.name
 
243
 
244
  result_messages.append({
245
  "role": "assistant",
246
+ "content": f"🔧 I'll use the **{tool_name}** tool to fetch the data you requested.",
247
+ "metadata": {"title": f"Using tool: {tool_name}", "status": "pending"}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
248
  })
249
 
250
  result = await self.session.call_tool(tool_name, tool_args)
 
 
 
 
251
  result_content = result.content
252
  if isinstance(result_content, list):
253
  result_content = "\n".join(str(item) for item in result_content)
254
 
255
+ result_messages.append({
256
+ "role": "assistant",
257
+ "content": self._format_tool_result(result_content),
258
+ "metadata": {"title": "Tool Result", "status": "done"}
259
+ })
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
260
 
261
+ # Get Claude's analysis of the result
262
  claude_messages.append({"role": "user", "content": f"Tool result for {tool_name}: {result_content}"})
263
+ next_response = self.clients["claude"].messages.create(
264
  model="claude-3-5-sonnet-20241022",
265
  max_tokens=1500,
266
  messages=claude_messages,
 
271
  "role": "assistant",
272
  "content": next_response.content[0].text
273
  })
274
+
275
+ return result_messages
276
+
277
+ async def _process_openai_response(self, response, claude_messages):
278
+ result_messages = []
279
+ message = response.choices[0].message
280
+
281
+ if message.content:
282
+ result_messages.append({"role": "assistant", "content": message.content})
283
+
284
+ if message.tool_calls:
285
+ for tool_call in message.tool_calls:
286
+ tool_name = tool_call.function.name
287
+ tool_args = json.loads(tool_call.function.arguments)
288
+
289
+ result_messages.append({
290
+ "role": "assistant",
291
+ "content": f"🔧 Using **{tool_name}** tool...",
292
+ "metadata": {"title": f"Tool: {tool_name}", "status": "pending"}
293
+ })
294
+
295
+ result = await self.session.call_tool(tool_name, tool_args)
296
+ result_content = result.content
297
+ if isinstance(result_content, list):
298
+ result_content = "\n".join(str(item) for item in result_content)
299
+
300
+ result_messages.append({
301
+ "role": "assistant",
302
+ "content": self._format_tool_result(result_content),
303
+ "metadata": {"title": "Tool Result", "status": "done"}
304
+ })
305
+
306
+ return result_messages
307
+
308
+ async def _process_mistral_response(self, response, claude_messages):
309
+ result_messages = []
310
+ message = response.choices[0].message
311
+
312
+ if message.content:
313
+ result_messages.append({"role": "assistant", "content": message.content})
314
+
315
  return result_messages
316
+
317
+ def _format_tool_result(self, result_content: str) -> str:
318
+ """Format tool result for display"""
319
+ try:
320
+ result_json = json.loads(result_content)
321
+ if isinstance(result_json, dict) and result_json.get("type") == "success":
322
+ station_code = result_json.get("station_code", "Unknown")
323
+ weather_data = result_json.get("data", {})
324
+
325
+ formatted_response = f"## 🌤️ Weather Data for Station: {station_code}\n\n"
326
+
327
+ if isinstance(weather_data, dict) and "reports" in weather_data:
328
+ reports = weather_data["reports"]
329
+ if isinstance(reports, list) and len(reports) > 0:
330
+ formatted_response += f"**Found {len(reports)} weather reports**\n\n"
331
+ for i, report in enumerate(reports[:3]):
332
+ if isinstance(report, dict):
333
+ timestamp = report.get("timestamp", "Unknown time")
334
+ temperature = report.get("temperature", "N/A")
335
+ humidity = report.get("humidity", "N/A")
336
+ formatted_response += f"**Report {i+1}** ({timestamp}):\n"
337
+ formatted_response += f"- Temperature: {temperature}\n"
338
+ formatted_response += f"- Humidity: {humidity}\n\n"
339
+
340
+ if len(reports) > 3:
341
+ formatted_response += f"... and {len(reports) - 3} more reports\n\n"
342
+
343
+ return formatted_response
344
+
345
+ elif isinstance(result_json, dict) and result_json.get("type") == "error":
346
+ error_msg = result_json.get("message", "Unknown error")
347
+ return f"## ❌ Error\n\n{error_msg}"
348
+
349
+ return f"```json\n{json.dumps(result_json, indent=2)}\n```"
350
+
351
+ except json.JSONDecodeError:
352
+ return f"```\n{result_content}\n```"
353
 
354
  client = MCPClientWrapper()
355
 
356
  def gradio_interface():
357
  with gr.Blocks(title="MCP LEXICON", theme=gr.themes.Soft()) as demo:
358
+ gr.Markdown("# 🌤️ LEXICON CHATBOT - Multi-LLM Weather Assistant")
359
  gr.Markdown(
360
+ "Ask me about weather data from any weather station! Upload files for additional context. "
361
+ "I can fetch hourly reports, help you explore weather patterns, and answer questions about specific stations."
 
362
  )
363
 
364
+ # Settings row
365
+ with gr.Row():
366
+ # LLM Provider selection
367
+ available_providers = client.get_available_providers()
368
+ if available_providers:
369
+ provider_choice = gr.Dropdown(
370
+ choices=available_providers,
371
+ value=client.current_provider if client.current_provider in available_providers else available_providers[0],
372
+ label="🤖 LLM Provider",
373
+ scale=1
374
+ )
375
+ else:
376
+ provider_choice = gr.Dropdown(
377
+ choices=["No API keys found"],
378
+ value="No API keys found",
379
+ label="🤖 LLM Provider",
380
+ scale=1,
381
+ interactive=False
382
+ )
383
+
384
+ # Connection status
385
+ status = gr.Textbox(
386
+ label="🔌 MCP Connection",
387
+ interactive=False,
388
+ value="🔄 Connecting...",
389
+ scale=2
390
+ )
391
 
392
  # Main chat interface
393
  chatbot = gr.Chatbot(
 
399
  bubble_full_width=False
400
  )
401
 
402
+ # Input row with file upload
403
+ with gr.Row():
404
+ with gr.Column(scale=4):
405
+ msg = gr.Textbox(
406
+ label="💬 Message",
407
+ placeholder="Ask about weather data or upload files for context...",
408
+ lines=2
409
+ )
410
+ with gr.Column(scale=2):
411
+ file_upload = gr.File(
412
+ label="📎 Upload Files",
413
+ file_count="multiple",
414
+ file_types=[".txt", ".pdf", ".csv", ".json", ".md", ".py", ".js", ".html", ".xml", ".yaml"]
415
+ )
416
 
417
+ # Control buttons
418
  with gr.Row():
419
+ submit_btn = gr.Button("Send", variant="primary", scale=2)
420
+ clear_btn = gr.Button("🗑️ Clear", scale=1)
421
+ reconnect_btn = gr.Button("🔄 Reconnect MCP", scale=1)
422
+
423
+ # Example queries
424
+ with gr.Accordion("💡 Example Queries", open=False):
425
  gr.Examples(
426
  examples=[
427
  "What weather stations are available?",
428
  "Get weather data for station ABC123",
429
+ "Show me the latest reports for NYC001",
430
+ "Compare weather patterns between two stations",
431
+ "Analyze the uploaded CSV data with weather information"
432
  ],
433
+ inputs=msg
 
434
  )
435
 
436
+ # Event handlers
437
  def auto_connect():
438
  return client.connect()
439
 
440
+ def change_provider(provider):
441
+ if provider != "No API keys found":
442
+ result = client.set_provider(provider)
443
+ return result
444
+ return "❌ No valid provider selected"
445
+
446
+ def process_and_clear(message, files, history):
447
+ if not message.strip() and not files:
448
+ return history, "", None
449
+ return client.process_message(message, files, history)
450
+
451
+ # Setup events
452
  demo.load(auto_connect, outputs=status)
453
+
454
+ provider_choice.change(
455
+ change_provider,
456
+ inputs=provider_choice,
457
+ outputs=gr.Textbox(visible=False) # Hidden output for status
458
+ )
459
+
460
+ submit_btn.click(
461
+ process_and_clear,
462
+ inputs=[msg, file_upload, chatbot],
463
+ outputs=[chatbot, msg, file_upload]
464
+ )
465
+
466
+ msg.submit(
467
+ process_and_clear,
468
+ inputs=[msg, file_upload, chatbot],
469
+ outputs=[chatbot, msg, file_upload]
470
+ )
471
+
472
+ clear_btn.click(lambda: ([], "", None), outputs=[chatbot, msg, file_upload])
473
  reconnect_btn.click(auto_connect, outputs=status)
474
 
475
  return demo
476
 
477
  if __name__ == "__main__":
478
+ # Check for API keys
479
+ api_keys = {
480
+ "ANTHROPIC_API_KEY": "Claude",
481
+ "OPENAI_API_KEY": "OpenAI",
482
+ "MISTRAL_API_KEY": "Mistral",
483
+ "LLAMAINDEX_API_KEY": "Llama"
484
+ }
485
+
486
+ found_keys = []
487
+ for key, name in api_keys.items():
488
+ if os.getenv(key):
489
+ found_keys.append(name)
490
+
491
+ if found_keys:
492
+ print(f"🔑 Found API keys for: {', '.join(found_keys)}")
493
+ else:
494
+ print("⚠️ Warning: No API keys found in environment.")
495
+ print("Please set them in your .env file:")
496
+ for key in api_keys.keys():
497
+ print(f"{key}=your_key_here")
498
 
499
+ print("🚀 Starting MCP Multi-LLM Weather Client...")
500
+ print("🔡 Will auto-connect to gradio_mcp_server.py")
501
+ print("🌐 Weather API endpoint: https://lexicon.osfarm.org/weather/stations")
502
 
503
  interface = gradio_interface()
504
  interface.launch(debug=True, share=True)
gradio_mcp_server.py CHANGED
@@ -7,108 +7,226 @@ import requests
7
  sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
8
  sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')
9
 
10
- mcp = FastMCP("weather_api_server")
11
 
12
- @mcp.tool()
13
- async def get_weather_data(station_code: str, page: int = 1, start: str = None, end: str = None) -> str:
14
- """Get hourly weather reports from a weather station.
15
-
16
- Args:
17
- station_code: The weather station code/ID to fetch data from
18
- page: Page number for pagination (default: 1)
19
- start: Start date/time filter in ISO format (optional, e.g., '2024-01-01T00:00:00Z')
20
- end: End date/time filter in ISO format (optional, e.g., '2024-01-31T23:59:59Z')
21
-
22
- Returns:
23
- JSON string containing weather data or error information
24
- """
25
- base_url = "https://lexicon.osfarm.org/weather/stations"
26
- url = f"{base_url}/{station_code}/hourly-reports.json"
27
-
28
- params = {
29
- "page": page,
30
- "start": start if start else "null",
31
- "end": end if end else "null"
32
- }
33
 
34
  try:
35
  response = requests.get(url, params=params, timeout=30)
36
  response.raise_for_status()
37
 
38
- weather_data = response.json()
 
 
 
 
 
 
 
39
 
40
  return json.dumps({
41
  "type": "success",
42
- "station_code": station_code,
43
- "data": weather_data,
44
- "request_params": params,
45
- "message": f"Successfully retrieved weather data for station {station_code}"
46
  }, indent=2)
47
 
48
  except requests.exceptions.ConnectionError:
49
  return json.dumps({
50
  "type": "error",
51
- "station_code": station_code,
52
- "message": f"Could not connect to weather API at localhost:8888. Please ensure the weather service is running."
53
  })
54
  except requests.exceptions.Timeout:
55
  return json.dumps({
56
  "type": "error",
57
- "station_code": station_code,
58
- "message": f"Request timed out while fetching data for station {station_code}"
59
  })
60
  except requests.exceptions.HTTPError as e:
61
  status_code = e.response.status_code if e.response else "unknown"
62
  return json.dumps({
63
  "type": "error",
64
- "station_code": station_code,
65
  "status_code": status_code,
66
- "message": f"HTTP error {status_code} when fetching data for station {station_code}. Station may not exist or API may be unavailable."
67
  })
68
  except json.JSONDecodeError:
69
  return json.dumps({
70
  "type": "error",
71
- "station_code": station_code,
72
- "message": f"Invalid JSON response received from weather API for station {station_code}"
73
  })
74
  except Exception as e:
75
  return json.dumps({
76
  "type": "error",
77
- "station_code": station_code,
78
- "message": f"Unexpected error fetching weather data: {str(e)}"
79
  })
80
 
 
 
 
 
 
 
81
  @mcp.tool()
82
- async def list_available_stations() -> str:
83
- """Get a list of available weather stations from the API.
84
 
85
- Returns:
86
- JSON string containing available stations or error information
87
  """
88
- base_url = "http://localhost:8888/weather/stations"
 
 
 
 
89
 
90
- try:
91
- response = requests.get(base_url, timeout=30)
92
- response.raise_for_status()
93
-
94
- stations_data = response.json()
95
-
96
- return json.dumps({
97
- "type": "success",
98
- "data": stations_data,
99
- "message": "Successfully retrieved list of available weather stations"
100
- }, indent=2)
101
-
102
- except requests.exceptions.ConnectionError:
103
- return json.dumps({
104
- "type": "error",
105
- "message": "Could not connect to weather API at localhost:8888. Please ensure the weather service is running."
106
- })
107
- except Exception as e:
108
- return json.dumps({
109
- "type": "error",
110
- "message": f"Error fetching station list: {str(e)}"
111
- })
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
112
 
113
  if __name__ == "__main__":
114
  mcp.run(transport='stdio')
 
7
  sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
8
  sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')
9
 
10
+ mcp = FastMCP("lexicon_api_server")
11
 
12
+ BASE_URL = "https://lexicon.osfarm.org"
13
+
14
def make_api_request(endpoint: str, params: dict = None) -> str:
    """Perform a GET request against the Lexicon API with uniform error handling.

    Args:
        endpoint: API path appended to BASE_URL (e.g. '/weather/stations.json').
        params: Optional query-string parameters forwarded to requests.get.

    Returns:
        A JSON-encoded string. On success it contains "type": "success",
        the decoded payload under "data", and "data_type" ("json" or
        "geojson"). On failure it contains "type": "error" and a
        human-readable "message" (plus "status_code" for HTTP errors).
        Errors are reported in-band rather than raised so tool callers
        always receive a well-formed JSON string.
    """
    url = f"{BASE_URL}{endpoint}"

    try:
        response = requests.get(url, params=params, timeout=30)
        response.raise_for_status()

        # Distinguish GeoJSON from plain JSON so callers know how to
        # interpret "data"; fall back to the endpoint suffix because some
        # servers serve GeoJSON with a generic content type.
        content_type = response.headers.get('content-type', '')
        if 'application/geo+json' in content_type or endpoint.endswith('.geojson'):
            data_type = "geojson"
        else:
            data_type = "json"

        data = response.json()

        return json.dumps({
            "type": "success",
            "data_type": data_type,
            "endpoint": endpoint,
            "data": data,
            "message": f"Successfully retrieved data from {endpoint}"
        }, indent=2)

    except requests.exceptions.ConnectionError:
        return json.dumps({
            "type": "error",
            "endpoint": endpoint,
            "message": "Could not connect to API. Please ensure the service is running."
        })
    except requests.exceptions.Timeout:
        return json.dumps({
            "type": "error",
            "endpoint": endpoint,
            "message": f"Request timed out for {endpoint}"
        })
    except requests.exceptions.HTTPError as e:
        # BUG FIX: requests.Response.__bool__ is False for 4xx/5xx statuses,
        # so a truthiness test ("if e.response") reported "unknown" precisely
        # when an error response *was* available. Compare against None instead.
        status_code = e.response.status_code if e.response is not None else "unknown"
        return json.dumps({
            "type": "error",
            "endpoint": endpoint,
            "status_code": status_code,
            "message": f"HTTP error {status_code} for {endpoint}. Resource may not exist or API may be unavailable."
        })
    except json.JSONDecodeError:
        # requests' JSONDecodeError subclasses json.JSONDecodeError, so this
        # catches a non-JSON body returned with a 200 status.
        return json.dumps({
            "type": "error",
            "endpoint": endpoint,
            "message": f"Invalid JSON response from {endpoint}"
        })
    except Exception as e:
        return json.dumps({
            "type": "error",
            "endpoint": endpoint,
            "message": f"Unexpected error: {str(e)}"
        })
71
 
72
# WEATHER TOOLS
@mcp.tool()
async def get_weather_stations() -> str:
    """Get a list of all available weather stations"""
    endpoint = "/weather/stations.json"
    return make_api_request(endpoint)
77
+
78
@mcp.tool()
async def get_weather_station(station_code: str) -> str:
    """Get details of a specific weather station

    Args:
        station_code: The weather station code/ID
    """
    endpoint = f"/weather/stations/{station_code}.json"
    return make_api_request(endpoint)
86
+
87
@mcp.tool()
async def get_weather_data(station_code: str, page: int = 1, start: str = None, end: str = None) -> str:
    """Get hourly weather reports from a weather station.

    Args:
        station_code: The weather station code/ID to fetch data from
        page: Page number for pagination (default: 1)
        start: Start date/time filter in ISO format (optional, e.g., '2024-01-01T00:00:00Z')
        end: End date/time filter in ISO format (optional, e.g., '2024-01-31T23:59:59Z')
    """
    # Only truthy filters are forwarded, matching the API's optional semantics.
    query = {"page": page}
    query.update({key: value for key, value in (("start", start), ("end", end)) if value})

    endpoint = f"/weather/stations/{station_code}/hourly-reports.json"
    return make_api_request(endpoint, query)
104
+
105
# TOOLS
@mcp.tool()
async def get_parcel_identifier_json() -> str:
    """Get parcel identifier tool data in JSON format"""
    endpoint = "/tools/parcel-identifier.json"
    return make_api_request(endpoint)

@mcp.tool()
async def get_parcel_identifier_geojson() -> str:
    """Get parcel identifier tool data in GeoJSON format"""
    endpoint = "/tools/parcel-identifier.geojson"
    return make_api_request(endpoint)
115
+
116
# GEOGRAPHICAL REFERENCES - CADASTRAL PARCELS
@mcp.tool()
async def get_cadastral_parcels() -> str:
    """Get list of all cadastral parcels"""
    endpoint = "/geographical-references/cadastral-parcels.json"
    return make_api_request(endpoint)

@mcp.tool()
async def get_cadastral_parcel(parcel_id: str) -> str:
    """Get details of a specific cadastral parcel

    Args:
        parcel_id: The cadastral parcel ID
    """
    endpoint = f"/geographical-references/cadastral-parcels/{parcel_id}.json"
    return make_api_request(endpoint)

@mcp.tool()
async def get_cadastral_parcel_geolocation(parcel_id: str) -> str:
    """Get geolocation data for a cadastral parcel in GeoJSON format

    Args:
        parcel_id: The cadastral parcel ID
    """
    endpoint = f"/geographical-references/cadastral-parcels/{parcel_id}/geolocation.geojson"
    return make_api_request(endpoint)
139
+
140
# GEOGRAPHICAL REFERENCES - CAP PARCELS
@mcp.tool()
async def get_cap_parcels() -> str:
    """Get list of all CAP (Common Agricultural Policy) parcels"""
    endpoint = "/geographical-references/cap-parcels.json"
    return make_api_request(endpoint)

@mcp.tool()
async def get_cap_parcel(cap_id: str) -> str:
    """Get details of a specific CAP parcel

    Args:
        cap_id: The CAP parcel ID
    """
    endpoint = f"/geographical-references/cap-parcels/{cap_id}.json"
    return make_api_request(endpoint)

@mcp.tool()
async def get_cap_parcel_geolocation(cap_id: str) -> str:
    """Get geolocation data for a CAP parcel in GeoJSON format

    Args:
        cap_id: The CAP parcel ID
    """
    endpoint = f"/geographical-references/cap-parcels/{cap_id}/geolocation.geojson"
    return make_api_request(endpoint)
163
+
164
# GEOGRAPHICAL REFERENCES - MUNICIPALITIES
@mcp.tool()
async def get_municipalities() -> str:
    """Get list of all municipalities"""
    endpoint = "/geographical-references/municipalities.json"
    return make_api_request(endpoint)

@mcp.tool()
async def get_municipality(municipality_id: str) -> str:
    """Get details of a specific municipality

    Args:
        municipality_id: The municipality ID
    """
    endpoint = f"/geographical-references/municipalities/{municipality_id}.json"
    return make_api_request(endpoint)

@mcp.tool()
async def get_municipality_cadastre(municipality_id: str) -> str:
    """Get cadastre data for a municipality in GeoJSON format

    Args:
        municipality_id: The municipality ID
    """
    endpoint = f"/geographical-references/municipalities/{municipality_id}/cadastre.geojson"
    return make_api_request(endpoint)

@mcp.tool()
async def get_municipality_cap_parcels(municipality_id: str) -> str:
    """Get CAP parcels geolocation data for a municipality in GeoJSON format

    Args:
        municipality_id: The municipality ID
    """
    endpoint = f"/geographical-references/municipalities/{municipality_id}/cap-parcels.geojson"
    return make_api_request(endpoint)
196
+
197
# PRODUCTION
@mcp.tool()
async def get_productions() -> str:
    """Get list of all production data"""
    endpoint = "/production/productions.json"
    return make_api_request(endpoint)

# PHYTOSANITARY
@mcp.tool()
async def get_cropsets() -> str:
    """Get list of all phytosanitary cropsets"""
    endpoint = "/phytosanitary/cropsets.json"
    return make_api_request(endpoint)

@mcp.tool()
async def get_phytosanitary_products() -> str:
    """Get list of all phytosanitary products"""
    endpoint = "/phytosanitary/products.json"
    return make_api_request(endpoint)

@mcp.tool()
async def get_phytosanitary_symbols() -> str:
    """Get list of all phytosanitary symbols"""
    endpoint = "/phytosanitary/symbols.json"
    return make_api_request(endpoint)

# SEEDS
@mcp.tool()
async def get_seed_varieties() -> str:
    """Get list of all seed varieties"""
    endpoint = "/seeds/varieties.json"
    return make_api_request(endpoint)

# VITICULTURE
@mcp.tool()
async def get_vine_varieties() -> str:
    """Get list of all vine varieties"""
    endpoint = "/viticulture/vine-varieties.json"
    return make_api_request(endpoint)
230
 
231
# Entry point: serve the registered Lexicon tools over the stdio MCP
# transport (the client launches this script as a subprocess and
# communicates via stdin/stdout).
if __name__ == "__main__":
    mcp.run(transport='stdio')
requirements.txt CHANGED
@@ -2,4 +2,5 @@ gradio[mcp]
2
  anthropic
3
  mcp
4
  openai
5
- mistralai
 
 
2
  anthropic
3
  mcp
4
  openai
5
+ mistralai==0.4.2
6
+ PyPDF2