serichard1 commited on
Commit
91d5f70
·
1 Parent(s): 58afe77

Add application file

Browse files
Files changed (4) hide show
  1. README.md +2 -2
  2. app.py +332 -0
  3. gradio_mcp_server.py +114 -0
  4. requirements.txt +1 -0
README.md CHANGED
@@ -1,8 +1,8 @@
1
  ---
2
  title: Chatbot Hackathon
3
- emoji: 🌍
4
  colorFrom: pink
5
- colorTo: blue
6
  sdk: gradio
7
  sdk_version: 5.46.0
8
  app_file: app.py
 
1
  ---
2
  title: Chatbot Hackathon
3
+ emoji: 📈
4
  colorFrom: pink
5
+ colorTo: yellow
6
  sdk: gradio
7
  sdk_version: 5.46.0
8
  app_file: app.py
app.py ADDED
@@ -0,0 +1,332 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import asyncio
import os
import json
from typing import List, Dict, Any, Union
from contextlib import AsyncExitStack

import gradio as gr
from gradio.components.chatbot import ChatMessage
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from anthropic import Anthropic
from dotenv import load_dotenv

# Load ANTHROPIC_API_KEY (and any other settings) from a local .env file.
load_dotenv()

# One module-level event loop shared by every MCP/Claude call: the stdio
# session opened in MCPClientWrapper._connect() must be driven by the same
# loop on every later run_until_complete() call, so a fresh loop per call
# would break the connection.
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
19
class MCPClientWrapper:
    """Bridges the Gradio UI, an MCP weather server subprocess, and Claude.

    Owns a stdio MCP connection to gradio_mcp_server.py and routes chat
    messages through the Anthropic API, executing any tool calls Claude
    requests. All async work is driven on the module-level ``loop`` via
    ``run_until_complete``, so the public methods are synchronous from
    Gradio's point of view.
    """

    def __init__(self):
        # Populated by _connect(); empty/None until a connection succeeds.
        self.session = None            # mcp.ClientSession once connected
        self.exit_stack = None         # AsyncExitStack owning transport + session
        self.anthropic = Anthropic()   # reads ANTHROPIC_API_KEY from the environment
        self.tools = []                # tool schemas advertised to Claude
        self.connected = False

    def connect(self) -> str:
        """Synchronously (re)connect to the MCP server; returns a status string."""
        return loop.run_until_complete(self._connect())

    async def _connect(self) -> str:
        """Spawn/attach to the MCP server subprocess and cache its tool list.

        Returns:
            A human-readable success/failure message for the status textbox.
        """
        # Tear down any previous connection first so reconnect is idempotent.
        if self.exit_stack:
            await self.exit_stack.aclose()

        self.exit_stack = AsyncExitStack()

        # The server script is expected to sit next to this file; it is run
        # as a subprocess and spoken to over stdio.
        server_path = "gradio_mcp_server.py"

        server_params = StdioServerParameters(
            command="python",
            args=[server_path],
            # Force UTF-8, unbuffered stdio so emoji output and MCP message
            # framing survive on platforms with other default encodings.
            env={"PYTHONIOENCODING": "utf-8", "PYTHONUNBUFFERED": "1"}
        )

        try:
            stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
            self.stdio, self.write = stdio_transport

            self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))
            await self.session.initialize()

            # Translate MCP tool metadata into the Anthropic tools format.
            response = await self.session.list_tools()
            self.tools = [{
                "name": tool.name,
                "description": tool.description,
                "input_schema": tool.inputSchema
            } for tool in response.tools]

            self.connected = True
            tool_names = [tool["name"] for tool in self.tools]
            return f"✅ Connected to MCP Weather Server. Available tools: {', '.join(tool_names)}"
        except Exception as e:
            self.connected = False
            return f"❌ Failed to connect to MCP server: {str(e)}"

    def process_message(self, message: str, history: List[Union[Dict[str, Any], ChatMessage]]) -> tuple:
        """Gradio submit handler.

        Returns:
            (updated chat history, cleared input textbox) — the shapes the
            ``msg.submit`` outputs expect.
        """
        if not self.session or not self.connected:
            return history + [
                {"role": "user", "content": message},
                {"role": "assistant", "content": "❌ MCP weather server is not connected. Please check the connection status above."}
            ], gr.Textbox(value="")

        new_messages = loop.run_until_complete(self._process_query(message, history))
        return history + [{"role": "user", "content": message}] + new_messages, gr.Textbox(value="")

    async def _process_query(self, message: str, history: List[Union[Dict[str, Any], ChatMessage]]):
        """Send the conversation to Claude, execute any requested tool, and
        return the list of new assistant messages to append to the chat."""
        # Rebuild the conversation in the Anthropic messages format; history
        # entries may be ChatMessage objects or plain dicts.
        claude_messages = []
        for msg in history:
            if isinstance(msg, ChatMessage):
                role, content = msg.role, msg.content
            else:
                role, content = msg.get("role"), msg.get("content")

            # NOTE(review): "system" entries are forwarded as ordinary
            # messages; the Anthropic API expects system prompts in a
            # separate `system` parameter — confirm history never holds one.
            if role in ["user", "assistant", "system"]:
                claude_messages.append({"role": role, "content": content})

        claude_messages.append({"role": "user", "content": message})

        response = self.anthropic.messages.create(
            model="claude-3-5-sonnet-20241022",
            max_tokens=1500,
            messages=claude_messages,
            tools=self.tools
        )

        result_messages = []

        for content in response.content:
            if content.type == 'text':
                result_messages.append({
                    "role": "assistant",
                    "content": content.text
                })

            elif content.type == 'tool_use':
                tool_name = content.name
                tool_args = content.input

                # Announce the tool call; its "pending" status is flipped to
                # "done" right after the tool returns (see [-2] index below).
                result_messages.append({
                    "role": "assistant",
                    "content": f"🔧 I'll use the **{tool_name}** tool to fetch the weather data you requested.",
                    "metadata": {
                        "title": f"Using tool: {tool_name}",
                        "log": f"Parameters: {json.dumps(tool_args, ensure_ascii=True)}",
                        "status": "pending",
                        "id": f"tool_call_{tool_name}"
                    }
                })

                # Nested message (via parent_id) showing the raw parameters.
                result_messages.append({
                    "role": "assistant",
                    "content": "```json\n" + json.dumps(tool_args, indent=2, ensure_ascii=True) + "\n```",
                    "metadata": {
                        "parent_id": f"tool_call_{tool_name}",
                        "id": f"params_{tool_name}",
                        "title": "Tool Parameters"
                    }
                })

                # Execute the tool on the MCP server.
                result = await self.session.call_tool(tool_name, tool_args)

                # [-2] is the "Using tool" announcement appended above
                # (the parameters message is [-1]); mark it done.
                if result_messages and "metadata" in result_messages[-2]:
                    result_messages[-2]["metadata"]["status"] = "done"

                # MCP tool results may be a list of content items; flatten to text.
                result_content = result.content
                if isinstance(result_content, list):
                    result_content = "\n".join(str(item) for item in result_content)

                # Parse and format the weather data response
                try:
                    result_json = json.loads(result_content)

                    if isinstance(result_json, dict):
                        if result_json.get("type") == "success":
                            # Format successful weather data response
                            station_code = result_json.get("station_code", "Unknown")
                            weather_data = result_json.get("data", {})

                            # Create a nicely formatted response
                            formatted_response = f"## 🌤️ Weather Data for Station: {station_code}\n\n"

                            if isinstance(weather_data, dict):
                                # Show key weather information if available
                                if "reports" in weather_data:
                                    reports = weather_data["reports"]
                                    if isinstance(reports, list) and len(reports) > 0:
                                        formatted_response += f"**Found {len(reports)} weather reports**\n\n"
                                        # Show first few reports as example
                                        for i, report in enumerate(reports[:3]):
                                            if isinstance(report, dict):
                                                timestamp = report.get("timestamp", "Unknown time")
                                                temperature = report.get("temperature", "N/A")
                                                humidity = report.get("humidity", "N/A")
                                                formatted_response += f"**Report {i+1}** ({timestamp}):\n"
                                                formatted_response += f"- Temperature: {temperature}\n"
                                                formatted_response += f"- Humidity: {humidity}\n\n"

                                        if len(reports) > 3:
                                            formatted_response += f"... and {len(reports) - 3} more reports\n\n"

                                formatted_response += "**Raw Data:**\n```json\n" + json.dumps(weather_data, indent=2) + "\n```"
                            else:
                                formatted_response += "**Raw Data:**\n```json\n" + json.dumps(weather_data, indent=2) + "\n```"

                            result_messages.append({
                                "role": "assistant",
                                "content": formatted_response,
                                "metadata": {
                                    "title": f"Weather Data Retrieved",
                                    "status": "done",
                                    "id": f"success_result_{tool_name}"
                                }
                            })

                        elif result_json.get("type") == "error":
                            # Format error response
                            error_msg = result_json.get("message", "Unknown error occurred")
                            station_code = result_json.get("station_code", "Unknown")

                            error_response = f"## ❌ Error Fetching Weather Data\n\n"
                            error_response += f"**Station:** {station_code}\n"
                            error_response += f"**Error:** {error_msg}\n\n"
                            error_response += "**Suggestions:**\n"
                            error_response += "- Check if the station code is correct\n"
                            error_response += "- Ensure the weather API service is running on localhost:8888\n"
                            error_response += "- Try a different station code\n"

                            result_messages.append({
                                "role": "assistant",
                                "content": error_response,
                                "metadata": {
                                    "title": "Weather API Error",
                                    "status": "error",
                                    "id": f"error_result_{tool_name}"
                                }
                            })
                        else:
                            # Unknown response format
                            result_messages.append({
                                "role": "assistant",
                                "content": "```json\n" + result_content + "\n```",
                                "metadata": {
                                    "title": "Raw Tool Response",
                                    "status": "done",
                                    "id": f"raw_result_{tool_name}"
                                }
                            })
                    else:
                        # Valid JSON but not an object (list/scalar): show verbatim.
                        result_messages.append({
                            "role": "assistant",
                            "content": "```\n" + result_content + "\n```",
                            "metadata": {
                                "title": "Raw Tool Response",
                                "status": "done",
                                "id": f"raw_result_{tool_name}"
                            }
                        })

                except json.JSONDecodeError:
                    # Non-JSON tool output: show it verbatim in a code block.
                    result_messages.append({
                        "role": "assistant",
                        "content": "```\n" + result_content + "\n```",
                        "metadata": {
                            "title": "Raw Tool Response",
                            "status": "done",
                            "id": f"raw_result_{tool_name}"
                        }
                    })

                # Let Claude analyze and respond to the weather data
                claude_messages.append({"role": "user", "content": f"Tool result for {tool_name}: {result_content}"})
                next_response = self.anthropic.messages.create(
                    model="claude-3-5-sonnet-20241022",
                    max_tokens=1500,
                    messages=claude_messages,
                )

                if next_response.content and next_response.content[0].type == 'text':
                    result_messages.append({
                        "role": "assistant",
                        "content": next_response.content[0].text
                    })

        return result_messages
254
+
255
# Single shared client instance used by all UI event handlers.
client = MCPClientWrapper()

def gradio_interface():
    """Build and return the Gradio Blocks UI for the weather chat client.

    The interface auto-connects to the MCP server on load; chat submissions
    are routed through ``client.process_message``.

    Returns:
        The assembled ``gr.Blocks`` app (launched by the caller).
    """
    with gr.Blocks(title="MCP Weather Client", theme=gr.themes.Soft()) as demo:
        gr.Markdown("# 🌤️ Weather Station Assistant")
        gr.Markdown(
            "Ask me about weather data from any weather station! I can fetch hourly reports, "
            "help you explore weather patterns, and answer questions about specific stations. "
            "Just ask naturally - for example: *'Get weather data for station ABC123'* or *'What stations are available?'*"
        )

        # Connection status (auto-updates on load)
        status = gr.Textbox(
            label="🔌 Connection Status",
            interactive=False,
            value="🔄 Connecting to weather server..."
        )

        # Main chat interface.
        # FIX: dropped bubble_full_width=False — the parameter is deprecated
        # and has no effect in Gradio 5.x (only emitted a warning).
        chatbot = gr.Chatbot(
            value=[],
            height=600,
            type="messages",
            show_copy_button=True,
            # NOTE(review): avatar_images normally expects image paths/URLs;
            # emoji strings may not render — confirm against installed Gradio.
            avatar_images=("👤", "🤖")
        )

        # Input row
        with gr.Row(equal_height=True):
            msg = gr.Textbox(
                label="💬 Ask about weather data",
                placeholder="e.g., 'Get weather data for station NYC001' or 'Show me available weather stations' or 'What's the latest data from station LAX123?'",
                scale=4
            )
            with gr.Column(scale=1):
                clear_btn = gr.Button("🗑️ Clear Chat", size="lg")
                reconnect_btn = gr.Button("🔄 Reconnect", size="lg")

        # Example queries
        with gr.Row():
            gr.Examples(
                examples=[
                    "What weather stations are available?",
                    "Get weather data for station ABC123",
                    "Show me the latest hourly reports for station NYC001",
                    "Get weather data for station LAX789 from page 2",
                    "Fetch weather data for station CHI456 between 2024-01-01 and 2024-01-31"
                ],
                inputs=msg,
                label="💡 Example Queries"
            )

        # Auto-connect when the interface loads (also reused by Reconnect).
        def auto_connect():
            return client.connect()

        # Event handlers
        demo.load(auto_connect, outputs=status)
        msg.submit(client.process_message, [msg, chatbot], [chatbot, msg])
        clear_btn.click(lambda: [], None, chatbot)
        reconnect_btn.click(auto_connect, outputs=status)

    return demo
319
+
320
if __name__ == "__main__":
    # Warn (but still start) when the Anthropic key is missing so the user
    # gets actionable setup instructions in the console.
    if not os.getenv("ANTHROPIC_API_KEY"):
        print("⚠️ Warning: ANTHROPIC_API_KEY not found in environment.")
        print("Please set it in your .env file or environment variables.")
        print("Example .env file content:")
        print("ANTHROPIC_API_KEY=your_api_key_here")

    print("🚀 Starting MCP Weather Client...")
    print("📡 Will auto-connect to gradio_mcp_server.py")
    print("🌐 Weather API endpoint: http://localhost:8888/weather/stations")

    # share=True publishes a temporary public Gradio link; debug=True
    # surfaces tracebacks in the UI/console.
    interface = gradio_interface()
    interface.launch(debug=True, share=True)
gradio_mcp_server.py ADDED
@@ -0,0 +1,114 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from mcp.server.fastmcp import FastMCP
import json
import sys
import io
import requests

# Re-wrap stdio as UTF-8 with replacement so tool output containing emoji
# cannot crash the stdio-based MCP transport on platforms whose default
# console encoding is not UTF-8.
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')

# MCP server instance; the functions below register themselves as tools
# via the @mcp.tool() decorator.
mcp = FastMCP("weather_api_server")
12
@mcp.tool()
async def get_weather_data(station_code: str, page: int = 1, start: str | None = None, end: str | None = None) -> str:
    """Get hourly weather reports from a weather station.

    Args:
        station_code: The weather station code/ID to fetch data from
        page: Page number for pagination (default: 1)
        start: Start date/time filter in ISO format (optional, e.g., '2024-01-01T00:00:00Z')
        end: End date/time filter in ISO format (optional, e.g., '2024-01-31T23:59:59Z')

    Returns:
        JSON string containing weather data or error information
    """
    # NOTE: the docstring doubles as the MCP tool description shown to the
    # LLM, so its wording is kept unchanged.
    # FIX: start/end were annotated `str = None` (implicit Optional,
    # disallowed by PEP 484) — now `str | None = None`; behavior unchanged.
    base_url = "http://localhost:8888/weather/stations"
    url = f"{base_url}/{station_code}/hourly-reports.json"

    # The literal string "null" mirrors what the backing API appears to
    # expect for absent filters — TODO confirm whether omitting the params
    # entirely would also be accepted.
    params = {
        "page": page,
        "start": start if start else "null",
        "end": end if end else "null"
    }

    try:
        response = requests.get(url, params=params, timeout=30)
        response.raise_for_status()

        weather_data = response.json()

        return json.dumps({
            "type": "success",
            "station_code": station_code,
            "data": weather_data,
            "request_params": params,
            "message": f"Successfully retrieved weather data for station {station_code}"
        }, indent=2)

    except requests.exceptions.ConnectionError:
        return json.dumps({
            "type": "error",
            "station_code": station_code,
            # (was a placeholder-free f-string; same literal text)
            "message": "Could not connect to weather API at localhost:8888. Please ensure the weather service is running."
        })
    except requests.exceptions.Timeout:
        return json.dumps({
            "type": "error",
            "station_code": station_code,
            "message": f"Request timed out while fetching data for station {station_code}"
        })
    except requests.exceptions.HTTPError as e:
        # e.response can be None when the error carries no response object.
        status_code = e.response.status_code if e.response else "unknown"
        return json.dumps({
            "type": "error",
            "station_code": station_code,
            "status_code": status_code,
            "message": f"HTTP error {status_code} when fetching data for station {station_code}. Station may not exist or API may be unavailable."
        })
    except json.JSONDecodeError:
        # requests' JSONDecodeError subclasses json.JSONDecodeError
        # (requests >= 2.27), so response.json() failures land here.
        return json.dumps({
            "type": "error",
            "station_code": station_code,
            "message": f"Invalid JSON response received from weather API for station {station_code}"
        })
    except Exception as e:
        return json.dumps({
            "type": "error",
            "station_code": station_code,
            "message": f"Unexpected error fetching weather data: {str(e)}"
        })
80
+
81
@mcp.tool()
async def list_available_stations() -> str:
    """Get a list of available weather stations from the API.

    Returns:
        JSON string containing available stations or error information
    """
    # The docstring above is the MCP tool description sent to the LLM and
    # is therefore kept verbatim.
    base_url = "http://localhost:8888/weather/stations"

    try:
        resp = requests.get(base_url, timeout=30)
        resp.raise_for_status()
        # Wrap the station payload in the envelope the client expects.
        envelope = {
            "type": "success",
            "data": resp.json(),
            "message": "Successfully retrieved list of available weather stations"
        }
        return json.dumps(envelope, indent=2)
    except requests.exceptions.ConnectionError:
        unreachable = {
            "type": "error",
            "message": "Could not connect to weather API at localhost:8888. Please ensure the weather service is running."
        }
        return json.dumps(unreachable)
    except Exception as e:
        failure = {
            "type": "error",
            "message": f"Error fetching station list: {str(e)}"
        }
        return json.dumps(failure)
112
+
113
if __name__ == "__main__":
    # Serve over stdio so app.py can spawn this script as a subprocess
    # via StdioServerParameters and talk MCP across its pipes.
    mcp.run(transport='stdio')
requirements.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ gradio[mcp]