PD03 committed on
Commit
fda49a6
Β·
verified Β·
1 Parent(s): 4265ed3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +358 -94
app.py CHANGED
@@ -5,164 +5,428 @@ import json
5
  import asyncio
6
  import aiohttp
7
  from typing import Dict, Any, List, Tuple
 
 
8
 
9
  class MCPClient:
 
 
10
  def __init__(self, server_url: str):
11
  self.server_url = server_url.rstrip('/')
12
  self.session = None
13
-
14
  async def initialize_session(self):
 
15
  if not self.session:
16
  self.session = aiohttp.ClientSession()
17
-
18
  async def close_session(self):
 
19
  if self.session:
20
  await self.session.close()
21
  self.session = None
22
-
23
  async def call_tool(self, tool_name: str, arguments: Dict[str, Any] = None) -> Dict[str, Any]:
 
24
  if arguments is None:
25
  arguments = {}
26
-
27
  await self.initialize_session()
28
-
29
  mcp_request = {
30
  "jsonrpc": "2.0",
31
  "id": 1,
32
  "method": "tools/call",
33
- "params": {"name": tool_name, "arguments": arguments}
 
 
 
34
  }
35
-
36
  try:
37
- async with self.session.post(f"{self.server_url}/mcp", json=mcp_request) as response:
38
- return await response.json()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
39
  except Exception as e:
40
- return {"success": False, "error": str(e)}
41
-
 
 
 
42
  async def list_tools(self) -> List[Dict[str, Any]]:
 
43
  await self.initialize_session()
44
- mcp_request = {"jsonrpc": "2.0", "id": 1, "method": "tools/list"}
 
 
 
 
 
 
45
  try:
46
- async with self.session.post(f"{self.server_url}/mcp", json=mcp_request) as response:
47
- result = await response.json()
48
- return result.get("result", {}).get("tools", [])
49
- except:
 
 
 
 
 
 
 
 
 
 
 
50
  return []
51
 
52
  class AIAssistant:
 
 
53
  def __init__(self, openai_api_key: str, mcp_client: MCPClient):
54
- self.openai_client = openai.OpenAI(api_key=openai_api_key)
 
 
 
 
 
 
 
 
55
  self.mcp_client = mcp_client
56
  self.available_tools = []
57
-
58
  async def initialize(self):
 
59
  self.available_tools = await self.mcp_client.list_tools()
60
-
61
  def get_system_prompt(self) -> str:
 
62
  tools_description = "\n".join([
63
  f"- {tool['name']}: {tool['description']}"
64
  for tool in self.available_tools
65
  ])
66
- return f"""You are an AI assistant with access to tools:
 
 
 
67
  {tools_description}
68
 
69
- Use these tools explicitly if user queries require external data.
 
 
 
70
 
71
- Respond with 'CALL_TOOL: tool_name(parameter=value)' to invoke tools.
72
- """
73
 
 
 
 
74
  def extract_tool_calls(self, response: str) -> List[Dict[str, Any]]:
 
75
  tool_calls = []
76
  lines = response.split('\n')
 
77
  for line in lines:
78
- if line.startswith('CALL_TOOL:'):
79
- tool_part = line[len('CALL_TOOL:'):].strip()
80
- tool_name, args = tool_part.split('(', 1)
81
- args = args.rstrip(')')
82
- arg_dict = {}
83
- for arg in args.split(','):
84
- key, value = arg.split('=')
85
- arg_dict[key.strip()] = value.strip().strip('"\'')
86
- tool_calls.append({'name': tool_name.strip(), 'arguments': arg_dict})
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87
  return tool_calls
88
-
89
  async def process_message(self, user_message: str) -> Tuple[str, str]:
90
- messages = [
91
- {"role": "system", "content": self.get_system_prompt()},
92
- {"role": "user", "content": user_message}
93
- ]
94
- response = self.openai_client.chat.completions.create(
95
- model="gpt-3.5-turbo",
96
- messages=messages,
97
- temperature=0
98
- )
99
- ai_response = response.choices[0].message.content
100
- tool_calls = self.extract_tool_calls(ai_response)
101
-
102
  tool_info = ""
103
- if tool_calls:
104
- tool_results = []
105
- for call in tool_calls:
106
- result = await self.mcp_client.call_tool(call['name'], call['arguments'])
107
- tool_results.append(result)
108
- tool_info += f"Called {call['name']}: {result}\n"
109
-
110
- # Let AI interpret the tool results
111
- final_messages = messages + [
112
- {"role": "assistant", "content": ai_response},
113
- {"role": "user", "content": f"Tool results:\n{json.dumps(tool_results)}"}
114
  ]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
115
 
116
- final_response = self.openai_client.chat.completions.create(
117
- model="gpt-3.5-turbo",
118
- messages=final_messages,
119
- temperature=0
120
- )
121
- return final_response.choices[0].message.content, tool_info
122
-
123
- return ai_response, ""
124
-
125
-
126
- # Globals
127
  assistant = None
 
128
 
129
- async def initialize_assistant(openai_key, mcp_url):
130
- global assistant
131
- if not openai_key or not mcp_url:
132
- return "❌ Provide valid OpenAI API key and MCP URL"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
133
 
134
- mcp_client = MCPClient(mcp_url)
135
- assistant = AIAssistant(openai_key, mcp_client)
136
- await assistant.initialize()
137
- return f"βœ… Initialized with {len(assistant.available_tools)} tools"
 
 
 
 
 
 
 
 
 
 
 
 
 
138
 
139
- async def chat_interface(message, history, openai_key, mcp_url):
 
140
  global assistant
 
141
  if not assistant:
142
- init_result = await initialize_assistant(openai_key, mcp_url)
143
  if "❌" in init_result:
144
  history.append([message, init_result])
145
  return history, ""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
146
 
147
- response, tool_info = await assistant.process_message(message)
148
- full_response = f"{response}\n\n{tool_info}" if tool_info else response
149
- history.append([message, full_response])
150
- return history, ""
151
-
152
- with gr.Blocks() as demo:
153
- gr.Markdown("# πŸ€– MCP Assistant")
154
- chatbot = gr.Chatbot(height=400)
155
- msg = gr.Textbox(placeholder="Ask me anything...")
156
- openai_key = gr.Textbox(type="password", label="OpenAI API Key")
157
- mcp_url = gr.Textbox(label="MCP Server URL")
158
-
159
- submit_btn = gr.Button("Send")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
160
  submit_btn.click(
161
- chat_interface, [msg, chatbot, openai_key, mcp_url], [chatbot, msg]
 
 
162
  )
 
163
  msg.submit(
164
- chat_interface, [msg, chatbot, openai_key, mcp_url], [chatbot, msg]
 
 
 
 
 
 
 
 
 
 
165
  )
166
 
167
- demo.launch()
168
-
 
5
  import asyncio
6
  import aiohttp
7
  from typing import Dict, Any, List, Tuple
8
+ from datetime import datetime
9
+ import os
10
 
11
class MCPClient:
    """Thin JSON-RPC 2.0 client for an MCP server's HTTP endpoint.

    All requests go to ``<server_url>/mcp``. A single ``aiohttp.ClientSession``
    is created lazily and reused until :meth:`close_session` is called.
    """

    def __init__(self, server_url: str):
        # Normalize so "/mcp" can be appended without double slashes.
        self.server_url = server_url.rstrip('/')
        self.session = None  # created lazily by initialize_session()

    async def initialize_session(self):
        """Create the shared aiohttp session on first use."""
        if not self.session:
            self.session = aiohttp.ClientSession()

    async def close_session(self):
        """Close and discard the shared session (safe to call repeatedly)."""
        if self.session:
            await self.session.close()
            self.session = None

    async def _post_mcp(self, payload: Dict[str, Any]) -> Any:
        """POST a JSON-RPC payload to /mcp and return the decoded JSON body.

        Raises:
            RuntimeError: on a non-200 response, carrying status and body text.
        """
        await self.initialize_session()
        async with self.session.post(
            f"{self.server_url}/mcp",
            json=payload,
            headers={
                "Content-Type": "application/json",
                # Required when the server is tunneled through ngrok's free tier,
                # which otherwise serves an HTML interstitial page.
                "ngrok-skip-browser-warning": "true",
            },
            timeout=30,
        ) as response:
            if response.status != 200:
                raise RuntimeError(f"HTTP {response.status}: {await response.text()}")
            return await response.json()

    async def call_tool(self, tool_name: str, arguments: Dict[str, Any] = None) -> Dict[str, Any]:
        """Call *tool_name* on the server and return its decoded result.

        Never raises: every failure is reported as
        ``{"success": False, "error": <message>}`` so callers can handle
        results uniformly.
        """
        if arguments is None:
            arguments = {}
        mcp_request = {
            "jsonrpc": "2.0",
            "id": 1,
            "method": "tools/call",
            "params": {"name": tool_name, "arguments": arguments},
        }
        try:
            result = await self._post_mcp(mcp_request)
            # MCP wraps tool output as result.content[0].text — a JSON string.
            if "result" in result and "content" in result["result"]:
                content = result["result"]["content"][0]["text"]
                return json.loads(content)
            return result
        except RuntimeError as e:
            # Non-200 HTTP status, already formatted by _post_mcp.
            return {"success": False, "error": str(e)}
        except Exception as e:
            return {"success": False, "error": f"Connection error: {str(e)}"}

    async def list_tools(self) -> List[Dict[str, Any]]:
        """Return the server's advertised tools, or [] on any failure."""
        mcp_request = {"jsonrpc": "2.0", "id": 1, "method": "tools/list"}
        try:
            result = await self._post_mcp(mcp_request)
            return result.get("result", {}).get("tools", [])
        except Exception as e:
            print(f"Error listing tools: {str(e)}")  # For debugging
            return []
100
 
101
class AIAssistant:
    """AI assistant that routes user queries through MCP tools.

    The model is instructed (via the system prompt) to request tool
    invocations with ``CALL_TOOL: name(key=value, ...)`` lines; those are
    parsed, executed against the MCP server, and fed back for a final answer.
    """

    def __init__(self, openai_api_key: str, mcp_client: "MCPClient"):
        try:
            self.openai_client = openai.OpenAI(
                api_key=openai_api_key,
                timeout=30.0,
            )
        except Exception:
            # Fallback for older OpenAI SDKs that only expose the module-level API.
            openai.api_key = openai_api_key
            self.openai_client = openai
        self.mcp_client = mcp_client
        self.available_tools = []  # populated by initialize()

    async def initialize(self):
        """Fetch the available tools from the MCP server."""
        self.available_tools = await self.mcp_client.list_tools()

    def _chat(self, messages: List[Dict[str, str]]) -> str:
        """Run one chat completion; supports both new and legacy OpenAI SDKs."""
        if hasattr(self.openai_client, 'chat'):
            response = self.openai_client.chat.completions.create(
                model="gpt-3.5-turbo",
                messages=messages,
                temperature=0.7,
                max_tokens=1000
            )
        else:
            # Legacy (pre-1.0) module-level API.
            response = self.openai_client.ChatCompletion.create(
                model="gpt-3.5-turbo",
                messages=messages,
                temperature=0.7,
                max_tokens=1000
            )
        return response.choices[0].message.content

    def get_system_prompt(self) -> str:
        """Generate the system prompt listing the currently available tools."""
        tools_description = "\n".join([
            f"- {tool['name']}: {tool['description']}"
            for tool in self.available_tools
        ])

        return f"""You are an AI assistant with access to SAP business systems and news data through specialized tools.

Available tools:
{tools_description}

When a user asks for information that can be retrieved using these tools, you should:
1. Identify which tool(s) would be helpful
2. Call the appropriate tool(s) with the right parameters
3. Interpret and present the results in a user-friendly way

For SAP-related queries (purchase orders, requisitions), use the SAP tools.
For news-related queries, use the news tools.

You can call tools by responding with: CALL_TOOL: tool_name(parameter1=value1, parameter2=value2)
"""

    def extract_tool_calls(self, response: str) -> List[Dict[str, Any]]:
        """Parse ``CALL_TOOL: name(key=value, ...)`` directives from a reply.

        Values that look like ints or booleans are coerced; everything else
        stays a string. Malformed directives are skipped, never fatal.
        """
        tool_calls = []
        for raw_line in response.split('\n'):
            line = raw_line.strip()
            if not line.startswith('CALL_TOOL:'):
                continue
            try:
                tool_part = line[len('CALL_TOOL:'):].strip()
                if '(' in tool_part and ')' in tool_part:
                    tool_name = tool_part.split('(')[0].strip()
                    params_str = tool_part.split('(')[1].split(')')[0]

                    params = {}
                    if params_str.strip():
                        for param in params_str.split(','):
                            if '=' in param:
                                key, value = param.split('=', 1)
                                key = key.strip()
                                value = value.strip().strip('"\'')
                                # Coerce obvious ints and booleans.
                                if value.isdigit():
                                    value = int(value)
                                elif value.lower() in ['true', 'false']:
                                    value = value.lower() == 'true'
                                params[key] = value

                    tool_calls.append({
                        'name': tool_name,
                        'arguments': params
                    })
            except Exception:
                continue  # skip a malformed directive rather than crash the chat
        return tool_calls

    async def process_message(self, user_message: str) -> Tuple[str, str]:
        """Process one user message, executing any tool calls the model requests.

        Returns:
            (assistant_reply, tool_execution_log) — the log is "" when no
            tools were invoked or on error.
        """
        tool_info = ""
        try:
            messages = [
                {"role": "system", "content": self.get_system_prompt()},
                {"role": "user", "content": user_message}
            ]
            ai_response = self._chat(messages)
            tool_calls = self.extract_tool_calls(ai_response)

            if not tool_calls:
                return ai_response, ""

            tool_results = []
            for tool_call in tool_calls:
                tool_info += f"🔧 Calling: {tool_call['name']}\n"
                result = await self.mcp_client.call_tool(
                    tool_call['name'],
                    tool_call['arguments']
                )
                tool_results.append({
                    'tool': tool_call['name'],
                    'result': result
                })
                if result.get('success'):
                    tool_info += f"✅ {tool_call['name']} completed\n"
                else:
                    tool_info += f"❌ {tool_call['name']} failed: {result.get('error', 'Unknown error')}\n"

            tool_results_text = "\n\n".join([
                f"Tool: {tr['tool']}\nResult: {json.dumps(tr['result'], indent=2)}"
                for tr in tool_results
            ])

            # Second round-trip: let the model interpret the raw tool output.
            final_messages = messages + [
                {"role": "assistant", "content": ai_response},
                {"role": "user", "content": f"Here are the tool results:\n\n{tool_results_text}\n\nPlease interpret these results and provide a helpful response to the user."}
            ]
            return self._chat(final_messages), tool_info
        except Exception as e:
            return f"❌ Error processing your request: {str(e)}", ""
266
 
267
# Module-level state shared by the Gradio callbacks below.
assistant = None
mcp_client = None
270
 
271
+ def test_connection(mcp_url):
272
+ """Test MCP server connection"""
273
+ if not mcp_url or mcp_url == "https://your-ngrok-url.ngrok.io":
274
+ return "❌ Please enter a valid MCP server URL"
275
+
276
+ try:
277
+ # Test health endpoint
278
+ response = requests.get(f"{mcp_url.rstrip('/')}/health", timeout=10)
279
+ if response.status_code == 200:
280
+ data = response.json()
281
+
282
+ # Test MCP tools list
283
+ mcp_request = {
284
+ "jsonrpc": "2.0",
285
+ "id": 1,
286
+ "method": "tools/list"
287
+ }
288
+
289
+ mcp_response = requests.post(
290
+ f"{mcp_url.rstrip('/')}/mcp",
291
+ json=mcp_request,
292
+ headers={
293
+ "Content-Type": "application/json",
294
+ "ngrok-skip-browser-warning": "true"
295
+ },
296
+ timeout=10
297
+ )
298
+
299
+ if mcp_response.status_code == 200:
300
+ mcp_data = mcp_response.json()
301
+ tools = mcp_data.get("result", {}).get("tools", [])
302
+ tool_names = [tool.get("name", "Unknown") for tool in tools]
303
+
304
+ return f"βœ… Connected successfully!\nHealth Status: {data.get('status', 'Unknown')}\nMCP Tools: {len(tools)}\nAvailable: {', '.join(tool_names)}"
305
+ else:
306
+ return f"βœ… Health OK, but MCP endpoint failed: HTTP {mcp_response.status_code}"
307
+ else:
308
+ return f"❌ Connection failed: HTTP {response.status_code}"
309
+ except Exception as e:
310
+ return f"❌ Connection error: {str(e)}"
311
 
312
async def initialize_assistant(openai_key, mcp_url):
    """Initialize the AI assistant"""
    global assistant, mcp_client

    # Validate both settings before doing any network work.
    if not openai_key:
        return "❌ Please enter your OpenAI API key"
    if not mcp_url or mcp_url == "https://your-ngrok-url.ngrok.io":
        return "❌ Please enter a valid MCP server URL"

    try:
        mcp_client = MCPClient(mcp_url)
        assistant = AIAssistant(openai_key, mcp_client)
        await assistant.initialize()
        return f"✅ AI Assistant initialized with {len(assistant.available_tools)} tools available"
    except Exception as e:
        return f"❌ Failed to initialize: {str(e)}"
329
 
330
def chat_interface(message, history, openai_key, mcp_url):
    """Main chat interface: append the assistant's reply for *message* to *history*.

    Returns (history, "") — the empty string clears the input textbox.

    BUG FIX: the previous version called asyncio.run() separately for
    initialization and for message processing. The aiohttp.ClientSession
    created inside the first event loop was then reused in a later, different
    loop, which fails at runtime ("Event loop is closed" / "attached to a
    different loop"). Everything for one chat turn now runs inside a single
    asyncio.run(), and the MCP session is closed before the loop ends so it
    is lazily recreated inside the next turn's loop.
    """
    async def _handle() -> str:
        """Run init (if needed) + one message exchange inside one event loop."""
        global assistant
        if not assistant:
            init_result = await initialize_assistant(openai_key, mcp_url)
            if "❌" in init_result:
                return init_result
        try:
            response, tool_info = await assistant.process_message(message)
            # Show the tool-execution log alongside the answer when present.
            if tool_info:
                return f"**Tool Execution:**\n{tool_info}\n\n**Response:**\n{response}"
            return response
        except Exception as e:
            return f"❌ Error: {str(e)}"
        finally:
            # The session must not outlive this event loop; it will be
            # recreated lazily by the next call's initialize_session().
            await assistant.mcp_client.close_session()

    history.append([message, asyncio.run(_handle())])
    return history, ""
355
 
356
# ---- Gradio UI ----------------------------------------------------------
with gr.Blocks(title="AI Assistant with SAP & News Integration", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🤖 AI Assistant with SAP & News Integration")
    gr.Markdown("Chat with an AI that can access SAP business data and news through natural language queries.")

    with gr.Row():
        # Left column: the conversation itself.
        with gr.Column(scale=2):
            chatbot = gr.Chatbot(
                height=500,
                show_label=False,
                container=True,
                bubble_full_width=False,
            )
            msg = gr.Textbox(
                placeholder="Ask me about SAP data, news, or anything else...",
                show_label=False,
                container=False,
            )
            with gr.Row():
                submit_btn = gr.Button("Send", variant="primary")
                clear_btn = gr.Button("Clear", variant="secondary")

        # Right column: credentials, server settings, and usage hints.
        with gr.Column(scale=1):
            gr.Markdown("### ⚙️ Configuration")
            openai_key = gr.Textbox(
                label="OpenAI API Key",
                type="password",
                placeholder="sk-...",
            )
            mcp_url = gr.Textbox(
                label="MCP Server URL",
                value="https://your-ngrok-url.ngrok.io",
                placeholder="https://abc123.ngrok.io",
            )
            test_btn = gr.Button("Test Connection", variant="secondary")
            connection_status = gr.Textbox(label="Connection Status", interactive=False)

            gr.Markdown("### 📋 Example Queries")
            gr.Markdown("""
            - "Show me recent purchase orders"
            - "Get purchase requisitions"
            - "What's the latest tech news?"
            - "Get news from BBC"
            - "Show me business news from the US"
            """)

    # Wiring: the Send button and pressing Enter both run the chat handler.
    chat_inputs = [msg, chatbot, openai_key, mcp_url]
    chat_outputs = [chatbot, msg]
    submit_btn.click(chat_interface, inputs=chat_inputs, outputs=chat_outputs)
    msg.submit(chat_interface, inputs=chat_inputs, outputs=chat_outputs)
    clear_btn.click(lambda: ([], ""), outputs=[chatbot, msg])
    test_btn.click(test_connection, inputs=[mcp_url], outputs=[connection_status])

if __name__ == "__main__":
    demo.launch()