ChrisSacrumCor committed on
Commit
ed83fb0
·
verified ·
1 Parent(s): 8863388

Fixed the FastAPI issue

Browse files
Files changed (1) hide show
  1. app.py +138 -42
app.py CHANGED
@@ -33,36 +33,56 @@ class MCPClient:
33
  self.conversation_history = []
34
 
35
  def call_mcp_server(self, server_key: str, endpoint: str, payload: Dict[str, Any]) -> Dict[str, Any]:
36
- """Make a call to an MCP server"""
37
  try:
38
  server_config = self.mcp_servers[server_key]
39
- url = f"{server_config['space_url']}/{endpoint}"
40
 
41
- response = requests.post(
42
- url,
43
- json=payload,
44
- headers={"Content-Type": "application/json"},
45
- timeout=30
46
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
 
48
  if response.status_code == 200:
49
  return {"success": True, "data": response.json()}
50
  else:
51
  return {
52
  "success": False,
53
- "error": f"HTTP {response.status_code}: {response.text}"
 
54
  }
55
 
56
  except requests.exceptions.RequestException as e:
57
- return {"success": False, "error": f"Request failed: {str(e)}"}
58
  except Exception as e:
59
- return {"success": False, "error": f"Unexpected error: {str(e)}"}
60
 
61
  def get_available_tools(self, server_key: str) -> List[Dict[str, Any]]:
62
  """Get available tools from an MCP server"""
63
  result = self.call_mcp_server(server_key, "tools/list", {})
64
  if result["success"]:
65
- return result["data"].get("tools", [])
 
 
 
 
 
66
  return []
67
 
68
  def execute_tool(self, server_key: str, tool_name: str, arguments: Dict[str, Any]) -> Dict[str, Any]:
@@ -78,10 +98,12 @@ class MCPClient:
78
  system_prompt = f"""
79
  You are an intelligent assistant that helps users interact with MCP (Model Context Protocol) servers.
80
 
81
- Available MCP servers:
82
- 1. Terraform: {self.mcp_servers['terraform']['description']}
83
- 2. Linux: {self.mcp_servers['linux']['description']}
84
- 3. Cisco: {self.mcp_servers['cisco']['description']}
 
 
85
 
86
  Analyze the user's request and determine:
87
  1. Which MCP server(s) would be most appropriate
@@ -90,7 +112,7 @@ class MCPClient:
90
 
91
  Respond in JSON format with:
92
  {{
93
- "recommended_server": "server_key",
94
  "reasoning": "explanation of why this server was chosen",
95
  "suggested_action": "what action to take",
96
  "parameters": {{"key": "value"}}
@@ -98,22 +120,45 @@ class MCPClient:
98
  """
99
 
100
  try:
101
- response = self.openai_client.chat.completions.create(
102
- model="gpt-4",
103
- messages=[
104
- {"role": "system", "content": system_prompt},
105
- {"role": "user", "content": user_input}
106
- ],
107
- temperature=0.3
108
- )
109
-
110
- return json.loads(response.choices[0].message.content)
111
-
112
- except Exception as e:
113
- return {
114
- "error": f"Failed to analyze request: {str(e)}",
115
- "recommended_server": None
116
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
117
 
118
  def generate_response(self, user_input: str, mcp_results: Dict[str, Any] = None) -> str:
119
  """Generate a natural language response using OpenAI"""
@@ -163,7 +208,7 @@ class MCPClient:
163
  self.conversation_history.append({"role": "user", "content": user_input})
164
 
165
  try:
166
- # Analyze request if no server specified
167
  if not selected_server or selected_server == "Auto-detect":
168
  analysis = self.analyze_user_request(user_input)
169
  if "error" in analysis:
@@ -175,15 +220,38 @@ class MCPClient:
175
  reasoning = analysis.get("reasoning", "No reasoning provided")
176
  status = f"[{timestamp}] Selected: {self.mcp_servers[selected_server]['name']} - {reasoning}"
177
  else:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
178
  status = f"[{timestamp}] Using: {self.mcp_servers[selected_server]['name']}"
179
 
180
  # Get available tools for the selected server
181
  tools = self.get_available_tools(selected_server)
182
 
183
  if not tools:
184
- response = f"⚠️ No tools available from {self.mcp_servers[selected_server]['name']} server"
 
 
 
 
 
 
185
  self.conversation_history.append({"role": "assistant", "content": response})
186
- return response, status + " - No tools available"
187
 
188
  # For now, just show available tools and generate a helpful response
189
  # In a full implementation, you'd use OpenAI to select and execute appropriate tools
@@ -255,19 +323,47 @@ def create_gradio_interface():
255
  history.append([message, error_msg])
256
  return history, ""
257
 
258
- def get_server_status():
259
  status_info = "## MCP Servers Status\n\n"
260
- for key, config in mcp_client.mcp_servers.items() if mcp_client else []:
261
  try:
262
- # Simple health check
263
- response = requests.get(f"{config['space_url']}/health", timeout=5)
264
- status = "🟢 Online" if response.status_code == 200 else "🟡 Issues"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
265
  except:
266
  status = "🔴 Offline"
267
 
268
  status_info += f"**{config['name']}**: {status}\n"
269
  status_info += f"- URL: {config['space_url']}\n"
270
- status_info += f"- Description: {config['description']}\n\n"
 
 
 
 
 
 
 
 
 
 
 
271
 
272
  return status_info
273
 
 
33
  self.conversation_history = []
34
 
35
  def call_mcp_server(self, server_key: str, endpoint: str, payload: Dict[str, Any]) -> Dict[str, Any]:
36
+ """Make a call to an MCP server (FastAPI-based)"""
37
  try:
38
  server_config = self.mcp_servers[server_key]
 
39
 
40
+ # For FastAPI MCP servers, we need to use different endpoints
41
+ if endpoint == "tools/list":
42
+ url = f"{server_config['space_url']}/tools"
43
+ response = requests.get(url, timeout=30)
44
+ elif endpoint == "tools/call":
45
+ url = f"{server_config['space_url']}/tools/call"
46
+ response = requests.post(
47
+ url,
48
+ json=payload,
49
+ headers={"Content-Type": "application/json"},
50
+ timeout=30
51
+ )
52
+ else:
53
+ # Fallback to direct endpoint
54
+ url = f"{server_config['space_url']}/{endpoint}"
55
+ response = requests.post(
56
+ url,
57
+ json=payload,
58
+ headers={"Content-Type": "application/json"},
59
+ timeout=30
60
+ )
61
 
62
  if response.status_code == 200:
63
  return {"success": True, "data": response.json()}
64
  else:
65
  return {
66
  "success": False,
67
+ "error": f"HTTP {response.status_code}: {response.text}",
68
+ "server": server_config['name']
69
  }
70
 
71
  except requests.exceptions.RequestException as e:
72
+ return {"success": False, "error": f"Request failed: {str(e)}", "server": server_config['name']}
73
  except Exception as e:
74
+ return {"success": False, "error": f"Unexpected error: {str(e)}", "server": server_config['name']}
75
 
76
  def get_available_tools(self, server_key: str) -> List[Dict[str, Any]]:
77
  """Get available tools from an MCP server"""
78
  result = self.call_mcp_server(server_key, "tools/list", {})
79
  if result["success"]:
80
+ data = result["data"]
81
+ # Handle different response formats
82
+ if isinstance(data, dict):
83
+ return data.get("tools", data.get("result", []))
84
+ elif isinstance(data, list):
85
+ return data
86
  return []
87
 
88
  def execute_tool(self, server_key: str, tool_name: str, arguments: Dict[str, Any]) -> Dict[str, Any]:
 
98
  system_prompt = f"""
99
  You are an intelligent assistant that helps users interact with MCP (Model Context Protocol) servers.
100
 
101
+ Available MCP servers (use these EXACT keys in your response):
102
+ 1. terraform: {self.mcp_servers['terraform']['description']}
103
+ 2. linux: {self.mcp_servers['linux']['description']}
104
+ 3. cisco: {self.mcp_servers['cisco']['description']}
105
+
106
+ IMPORTANT: For the "recommended_server" field, you MUST use one of these exact keys: "terraform", "linux", or "cisco"
107
 
108
  Analyze the user's request and determine:
109
  1. Which MCP server(s) would be most appropriate
 
112
 
113
  Respond in JSON format with:
114
  {{
115
+ "recommended_server": "terraform|linux|cisco",
116
  "reasoning": "explanation of why this server was chosen",
117
  "suggested_action": "what action to take",
118
  "parameters": {{"key": "value"}}
 
120
  """
121
 
122
  try:
123
+ try:
124
+ response = self.openai_client.chat.completions.create(
125
+ model="gpt-4",
126
+ messages=[
127
+ {"role": "system", "content": system_prompt},
128
+ {"role": "user", "content": user_input}
129
+ ],
130
+ temperature=0.3
131
+ )
132
+
133
+ content = response.choices[0].message.content.strip()
134
+
135
+ # Try to parse JSON, handle cases where GPT returns non-JSON
136
+ try:
137
+ return json.loads(content)
138
+ except json.JSONDecodeError:
139
+ # If not valid JSON, extract server recommendation manually
140
+ content_lower = content.lower()
141
+ if "cisco" in content_lower:
142
+ recommended_server = "cisco"
143
+ elif "linux" in content_lower:
144
+ recommended_server = "linux"
145
+ elif "terraform" in content_lower:
146
+ recommended_server = "terraform"
147
+ else:
148
+ recommended_server = "cisco" # Default for network questions
149
+
150
+ return {
151
+ "recommended_server": recommended_server,
152
+ "reasoning": "Based on content analysis",
153
+ "suggested_action": content,
154
+ "parameters": {}
155
+ }
156
+
157
+ except Exception as e:
158
+ return {
159
+ "error": f"Failed to analyze request: {str(e)}",
160
+ "recommended_server": "cisco" # Default fallback
161
+ }
162
 
163
  def generate_response(self, user_input: str, mcp_results: Dict[str, Any] = None) -> str:
164
  """Generate a natural language response using OpenAI"""
 
208
  self.conversation_history.append({"role": "user", "content": user_input})
209
 
210
  try:
211
+ # Analyze request if no server specified or auto-detect selected
212
  if not selected_server or selected_server == "Auto-detect":
213
  analysis = self.analyze_user_request(user_input)
214
  if "error" in analysis:
 
220
  reasoning = analysis.get("reasoning", "No reasoning provided")
221
  status = f"[{timestamp}] Selected: {self.mcp_servers[selected_server]['name']} - {reasoning}"
222
  else:
223
+ # Map display names back to keys if needed
224
+ server_mapping = {
225
+ "MCP Terraform": "terraform",
226
+ "MCP Linux": "linux",
227
+ "MCP Cisco": "cisco",
228
+ "Terraform": "terraform",
229
+ "Linux": "linux",
230
+ "Cisco": "cisco"
231
+ }
232
+ selected_server = server_mapping.get(selected_server, selected_server.lower())
233
+
234
+ # Validate server key
235
+ if selected_server not in self.mcp_servers:
236
+ response = f"❌ Invalid server selection: {selected_server}. Available: {', '.join(self.mcp_servers.keys())}"
237
+ self.conversation_history.append({"role": "assistant", "content": response})
238
+ return response, f"[{timestamp}] Invalid server"
239
+
240
  status = f"[{timestamp}] Using: {self.mcp_servers[selected_server]['name']}"
241
 
242
  # Get available tools for the selected server
243
  tools = self.get_available_tools(selected_server)
244
 
245
  if not tools:
246
+ # Try to get more info about the server
247
+ server_check = self.call_mcp_server(selected_server, "", {})
248
+ if not server_check["success"]:
249
+ response = f"⚠️ Cannot connect to {self.mcp_servers[selected_server]['name']} server: {server_check.get('error', 'Unknown error')}"
250
+ else:
251
+ response = f"⚠️ No tools available from {self.mcp_servers[selected_server]['name']} server (but server is responsive)"
252
+
253
  self.conversation_history.append({"role": "assistant", "content": response})
254
+ return response, status + " - Connection/Tools issue"
255
 
256
  # For now, just show available tools and generate a helpful response
257
  # In a full implementation, you'd use OpenAI to select and execute appropriate tools
 
323
  history.append([message, error_msg])
324
  return history, ""
325
 
326
+ def get_server_status(self):
327
  status_info = "## MCP Servers Status\n\n"
328
+ for key, config in (mcp_client.mcp_servers.items() if mcp_client else []):
329
  try:
330
+ # Try multiple health check endpoints
331
+ health_urls = [
332
+ f"{config['space_url']}/health",
333
+ f"{config['space_url']}/",
334
+ f"{config['space_url']}/docs"
335
+ ]
336
+
337
+ status = "🔴 Offline"
338
+ for url in health_urls:
339
+ try:
340
+ response = requests.get(url, timeout=5)
341
+ if response.status_code == 200:
342
+ status = "🟢 Online"
343
+ break
344
+ elif response.status_code in [404, 422]: # Common for FastAPI
345
+ status = "🟡 Running (different endpoint structure)"
346
+ break
347
+ except:
348
+ continue
349
+
350
  except:
351
  status = "🔴 Offline"
352
 
353
  status_info += f"**{config['name']}**: {status}\n"
354
  status_info += f"- URL: {config['space_url']}\n"
355
+ status_info += f"- Description: {config['description']}\n"
356
+
357
+ # Try to get tools info for additional status
358
+ if mcp_client:
359
+ try:
360
+ tools = mcp_client.get_available_tools(key)
361
+ tools_count = len(tools) if tools else 0
362
+ status_info += f"- Available Tools: {tools_count}\n"
363
+ except:
364
+ status_info += f"- Available Tools: Unable to fetch\n"
365
+
366
+ status_info += "\n"
367
 
368
  return status_info
369