PD03 committed on
Commit
c048102
Β·
verified Β·
1 Parent(s): 8622f42

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +47 -315
app.py CHANGED
@@ -5,374 +5,106 @@ import json
5
  import asyncio
6
  import aiohttp
7
  from typing import Dict, Any, List, Tuple
8
- from datetime import datetime
9
- import os
10
 
11
  class MCPClient:
12
- """MCP Client for communicating with the MCP server"""
13
-
14
  def __init__(self, server_url: str):
15
  self.server_url = server_url.rstrip('/')
16
  self.session = None
17
-
18
  async def initialize_session(self):
19
- """Initialize aiohttp session"""
20
  if not self.session:
21
  self.session = aiohttp.ClientSession()
22
-
23
  async def close_session(self):
24
- """Close aiohttp session"""
25
  if self.session:
26
  await self.session.close()
27
  self.session = None
28
-
29
  async def call_tool(self, tool_name: str, arguments: Dict[str, Any] = None) -> Dict[str, Any]:
30
- """Call a tool on the MCP server"""
31
  if arguments is None:
32
  arguments = {}
33
-
34
  await self.initialize_session()
35
-
36
  mcp_request = {
37
  "jsonrpc": "2.0",
38
  "id": 1,
39
  "method": "tools/call",
40
- "params": {
41
- "name": tool_name,
42
- "arguments": arguments
43
- }
44
  }
45
-
46
  try:
47
- async with self.session.post(
48
- f"{self.server_url}/mcp",
49
- json=mcp_request,
50
- headers={"Content-Type": "application/json"}
51
- ) as response:
52
- if response.status == 200:
53
- result = await response.json()
54
- if "result" in result and "content" in result["result"]:
55
- content = result["result"]["content"][0]["text"]
56
- return json.loads(content)
57
- return result
58
- else:
59
- return {
60
- "success": False,
61
- "error": f"HTTP {response.status}: {await response.text()}"
62
- }
63
  except Exception as e:
64
- return {
65
- "success": False,
66
- "error": f"Connection error: {str(e)}"
67
- }
68
-
69
  async def list_tools(self) -> List[Dict[str, Any]]:
70
- """List available tools on the MCP server"""
71
  await self.initialize_session()
72
-
73
- mcp_request = {
74
- "jsonrpc": "2.0",
75
- "id": 1,
76
- "method": "tools/list"
77
- }
78
-
79
  try:
80
- async with self.session.post(
81
- f"{self.server_url}/mcp",
82
- json=mcp_request,
83
- headers={"Content-Type": "application/json"}
84
- ) as response:
85
- if response.status == 200:
86
- result = await response.json()
87
- return result.get("result", {}).get("tools", [])
88
- return []
89
- except Exception as e:
90
  return []
91
 
92
  class AIAssistant:
93
- """AI Assistant with MCP integration"""
94
-
95
  def __init__(self, openai_api_key: str, mcp_client: MCPClient):
96
- try:
97
- self.openai_client = openai.OpenAI(
98
- api_key=openai_api_key,
99
- timeout=30.0
100
- )
101
- except Exception as e:
102
- # Fallback for older OpenAI versions
103
- openai.api_key = openai_api_key
104
- self.openai_client = openai
105
  self.mcp_client = mcp_client
106
  self.available_tools = []
107
-
108
  async def initialize(self):
109
- """Initialize the assistant by fetching available tools"""
110
  self.available_tools = await self.mcp_client.list_tools()
111
-
112
- def get_system_prompt(self) -> str:
113
- """Generate system prompt with available tools"""
114
- tools_description = "\n".join([
115
- f"- {tool['name']}: {tool['description']}"
116
- for tool in self.available_tools
117
- ])
118
-
119
- return f"""You are an AI assistant with access to SAP business systems and news data through specialized tools.
120
-
121
- Available tools:
122
- {tools_description}
123
-
124
- When a user asks for information that can be retrieved using these tools, you should:
125
- 1. Identify which tool(s) would be helpful
126
- 2. Call the appropriate tool(s) with the right parameters
127
- 3. Interpret and present the results in a user-friendly way
128
 
129
- For SAP-related queries (purchase orders, requisitions), use the SAP tools.
130
- For news-related queries, use the news tools.
131
-
132
- You can call tools by responding with: CALL_TOOL: tool_name(parameter1=value1, parameter2=value2)
133
- """
134
-
135
- def extract_tool_calls(self, response: str) -> List[Dict[str, Any]]:
136
- """Extract tool calls from AI response"""
137
- tool_calls = []
138
- lines = response.split('\n')
139
-
140
- for line in lines:
141
- if line.strip().startswith('CALL_TOOL:'):
142
- try:
143
- tool_part = line.strip()[10:].strip()
144
-
145
- if '(' in tool_part and ')' in tool_part:
146
- tool_name = tool_part.split('(')[0].strip()
147
- params_str = tool_part.split('(')[1].split(')')[0]
148
-
149
- params = {}
150
- if params_str.strip():
151
- for param in params_str.split(','):
152
- if '=' in param:
153
- key, value = param.split('=', 1)
154
- key = key.strip()
155
- value = value.strip().strip('"\'')
156
- try:
157
- if value.isdigit():
158
- value = int(value)
159
- elif value.lower() in ['true', 'false']:
160
- value = value.lower() == 'true'
161
- except:
162
- pass
163
- params[key] = value
164
-
165
- tool_calls.append({
166
- 'name': tool_name,
167
- 'arguments': params
168
- })
169
- except Exception as e:
170
- continue
171
-
172
- return tool_calls
173
-
174
  async def process_message(self, user_message: str) -> Tuple[str, str]:
175
- """Process user message and handle tool calls"""
176
- tool_info = ""
177
-
178
  try:
179
- messages = [
180
- {"role": "system", "content": self.get_system_prompt()},
181
- {"role": "user", "content": user_message}
182
- ]
183
-
184
- response = self.openai_client.chat.completions.create(
185
  model="gpt-3.5-turbo",
186
- messages=messages,
187
  temperature=0.7,
188
  max_tokens=1000
189
  )
190
-
191
- ai_response = response.choices[0].message.content
192
- tool_calls = self.extract_tool_calls(ai_response)
193
-
194
- if tool_calls:
195
- tool_results = []
196
-
197
- for tool_call in tool_calls:
198
- tool_info += f"πŸ”§ Calling: {tool_call['name']}\n"
199
-
200
- result = await self.mcp_client.call_tool(
201
- tool_call['name'],
202
- tool_call['arguments']
203
- )
204
-
205
- tool_results.append({
206
- 'tool': tool_call['name'],
207
- 'result': result
208
- })
209
-
210
- if result.get('success'):
211
- tool_info += f"βœ… {tool_call['name']} completed\n"
212
- else:
213
- tool_info += f"❌ {tool_call['name']} failed: {result.get('error', 'Unknown error')}\n"
214
-
215
- tool_results_text = "\n\n".join([
216
- f"Tool: {tr['tool']}\nResult: {json.dumps(tr['result'], indent=2)}"
217
- for tr in tool_results
218
- ])
219
-
220
- final_messages = messages + [
221
- {"role": "assistant", "content": ai_response},
222
- {"role": "user", "content": f"Here are the tool results:\n\n{tool_results_text}\n\nPlease interpret these results and provide a helpful response to the user."}
223
- ]
224
-
225
- final_response = self.openai_client.chat.completions.create(
226
- model="gpt-3.5-turbo",
227
- messages=final_messages,
228
- temperature=0.7,
229
- max_tokens=1000
230
- )
231
-
232
- return final_response.choices[0].message.content, tool_info
233
- else:
234
- return ai_response, ""
235
-
236
  except Exception as e:
237
- return f"❌ Error processing your request: {str(e)}", ""
238
 
239
- # Global variables
240
  assistant = None
241
- mcp_client = None
242
-
243
- def test_connection(mcp_url):
244
- """Test MCP server connection"""
245
- if not mcp_url or mcp_url == "https://your-ngrok-url.ngrok.io":
246
- return "❌ Please enter a valid MCP server URL"
247
-
248
- try:
249
- response = requests.get(f"{mcp_url.rstrip('/')}/health", timeout=10)
250
- if response.status_code == 200:
251
- data = response.json()
252
- return f"βœ… Connected successfully!\nStatus: {data.get('status', 'Unknown')}\nTools: {len(data.get('available_tools', []))}"
253
- else:
254
- return f"❌ Connection failed: HTTP {response.status_code}"
255
- except Exception as e:
256
- return f"❌ Connection error: {str(e)}"
257
 
258
  async def initialize_assistant(openai_key, mcp_url):
259
- """Initialize the AI assistant"""
260
- global assistant, mcp_client
261
-
262
- if not openai_key:
263
- return "❌ Please enter your OpenAI API key"
264
-
265
- if not mcp_url or mcp_url == "https://your-ngrok-url.ngrok.io":
266
- return "❌ Please enter a valid MCP server URL"
267
-
268
- try:
269
- mcp_client = MCPClient(mcp_url)
270
- assistant = AIAssistant(openai_key, mcp_client)
271
- await assistant.initialize()
272
- return f"βœ… AI Assistant initialized with {len(assistant.available_tools)} tools available"
273
- except Exception as e:
274
- return f"❌ Failed to initialize: {str(e)}"
275
 
276
  def chat_interface(message, history, openai_key, mcp_url):
277
- """Main chat interface"""
278
  global assistant
279
-
280
  if not assistant:
281
  init_result = asyncio.run(initialize_assistant(openai_key, mcp_url))
282
  if "❌" in init_result:
283
  history.append([message, init_result])
284
  return history, ""
285
-
286
- try:
287
- response, tool_info = asyncio.run(assistant.process_message(message))
288
-
289
- # Format response with tool info if available
290
- if tool_info:
291
- full_response = f"**Tool Execution:**\n{tool_info}\n\n**Response:**\n{response}"
292
- else:
293
- full_response = response
294
-
295
- history.append([message, full_response])
296
- return history, ""
297
- except Exception as e:
298
- error_response = f"❌ Error: {str(e)}"
299
- history.append([message, error_response])
300
- return history, ""
301
 
302
- # Create Gradio interface
303
- with gr.Blocks(title="AI Assistant with SAP & News Integration", theme=gr.themes.Soft()) as demo:
304
- gr.Markdown("# πŸ€– AI Assistant with SAP & News Integration")
305
- gr.Markdown("Chat with an AI that can access SAP business data and news through natural language queries.")
306
-
307
- with gr.Row():
308
- with gr.Column(scale=2):
309
- chatbot = gr.Chatbot(
310
- height=500,
311
- show_label=False,
312
- container=True,
313
- bubble_full_width=False
314
- )
315
-
316
- msg = gr.Textbox(
317
- placeholder="Ask me about SAP data, news, or anything else...",
318
- show_label=False,
319
- container=False
320
- )
321
-
322
- with gr.Row():
323
- submit_btn = gr.Button("Send", variant="primary")
324
- clear_btn = gr.Button("Clear", variant="secondary")
325
-
326
- with gr.Column(scale=1):
327
- gr.Markdown("### βš™οΈ Configuration")
328
-
329
- openai_key = gr.Textbox(
330
- label="OpenAI API Key",
331
- type="password",
332
- placeholder="sk-..."
333
- )
334
-
335
- mcp_url = gr.Textbox(
336
- label="MCP Server URL",
337
- value="https://your-ngrok-url.ngrok.io",
338
- placeholder="https://abc123.ngrok.io"
339
- )
340
-
341
- test_btn = gr.Button("Test Connection", variant="secondary")
342
- connection_status = gr.Textbox(label="Connection Status", interactive=False)
343
-
344
- gr.Markdown("### πŸ“‹ Example Queries")
345
- gr.Markdown("""
346
- - "Show me recent purchase orders"
347
- - "Get purchase requisitions"
348
- - "What's the latest tech news?"
349
- - "Get news from BBC"
350
- - "Show me business news from the US"
351
- """)
352
-
353
- # Event handlers
354
- def respond(message, history, openai_key, mcp_url):
355
- return chat_interface(message, history, openai_key, mcp_url)
356
-
357
- submit_btn.click(
358
- respond,
359
- [msg, chatbot, openai_key, mcp_url],
360
- [chatbot, msg]
361
- )
362
-
363
- msg.submit(
364
- respond,
365
- [msg, chatbot, openai_key, mcp_url],
366
- [chatbot, msg]
367
- )
368
-
369
- clear_btn.click(lambda: ([], ""), outputs=[chatbot, msg])
370
-
371
- test_btn.click(
372
- test_connection,
373
- [mcp_url],
374
- [connection_status]
375
- )
376
 
377
- if __name__ == "__main__":
378
- demo.launch()
 
5
  import asyncio
6
  import aiohttp
7
  from typing import Dict, Any, List, Tuple
 
 
8
 
9
class MCPClient:
    """Minimal JSON-RPC client for an MCP (Model Context Protocol) server.

    Talks to the server's ``/mcp`` endpoint over HTTP using a lazily created
    aiohttp session.  All network failures are reported as return values
    rather than raised, so callers can surface them in the UI.
    """

    def __init__(self, server_url: str):
        """Store the server base URL (trailing slashes stripped)."""
        self.server_url = server_url.rstrip('/')
        self.session = None  # aiohttp.ClientSession, created on first use

    async def initialize_session(self):
        """Create the shared aiohttp session if it does not exist yet."""
        if not self.session:
            self.session = aiohttp.ClientSession()

    async def close_session(self):
        """Close and discard the aiohttp session, if one was created."""
        if self.session:
            await self.session.close()
            self.session = None

    async def call_tool(self, tool_name: str, arguments: Dict[str, Any] = None) -> Dict[str, Any]:
        """Invoke a server-side tool via a JSON-RPC ``tools/call`` request.

        Returns the raw JSON-RPC response body on success, or a dict of the
        form ``{"success": False, "error": ...}`` on any failure.
        """
        if arguments is None:
            arguments = {}
        await self.initialize_session()
        mcp_request = {
            "jsonrpc": "2.0",
            "id": 1,
            "method": "tools/call",
            "params": {"name": tool_name, "arguments": arguments},
        }
        try:
            async with self.session.post(f"{self.server_url}/mcp", json=mcp_request) as response:
                return await response.json()
        except Exception as e:
            return {"success": False, "error": str(e)}

    async def list_tools(self) -> List[Dict[str, Any]]:
        """Fetch the server's tool catalog; return [] if the call fails."""
        await self.initialize_session()
        mcp_request = {"jsonrpc": "2.0", "id": 1, "method": "tools/list"}
        try:
            async with self.session.post(f"{self.server_url}/mcp", json=mcp_request) as response:
                result = await response.json()
                return result.get("result", {}).get("tools", [])
        except Exception:
            # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
            # are not swallowed; any network/JSON error yields an empty list.
            return []
51
 
52
class AIAssistant:
    """Chat assistant backed by OpenAI, with an attached MCP client.

    The MCP client supplies the tool catalog (``available_tools``); this
    implementation answers with a single chat completion and does not
    dispatch tool calls.
    """

    def __init__(self, openai_api_key: str, mcp_client: "MCPClient"):
        # Prefer the modern client (openai>=1.0); fall back to the legacy
        # module-level API for older installs.  ``openai.ChatCompletion``
        # was removed in openai 1.0, so relying on it alone breaks there.
        try:
            self.openai_client = openai.OpenAI(api_key=openai_api_key)
            self._use_legacy_api = False
        except Exception:
            openai.api_key = openai_api_key
            self.openai_client = openai
            self._use_legacy_api = True
        self.mcp_client = mcp_client
        self.available_tools = []  # populated by initialize()

    async def initialize(self):
        """Fetch the tool catalog from the MCP server."""
        self.available_tools = await self.mcp_client.list_tools()

    async def process_message(self, user_message: str) -> Tuple[str, str]:
        """Answer *user_message* with one chat completion.

        Returns ``(reply_text, tool_info)``; ``tool_info`` is currently
        always "" because tool dispatch is not performed here.  Errors are
        returned as a user-facing string instead of being raised.
        """
        try:
            if self._use_legacy_api:
                response = self.openai_client.ChatCompletion.create(
                    model="gpt-3.5-turbo",
                    messages=[{"role": "user", "content": user_message}],
                    temperature=0.7,
                    max_tokens=1000
                )
            else:
                response = self.openai_client.chat.completions.create(
                    model="gpt-3.5-turbo",
                    messages=[{"role": "user", "content": user_message}],
                    temperature=0.7,
                    max_tokens=1000
                )
            return response.choices[0].message.content, ""
        except Exception as e:
            return f"❌ Error: {str(e)}", ""
73
 
74
# Module-level singleton: the active AIAssistant, created lazily by
# chat_interface() on the first message.
assistant = None
 
77
async def initialize_assistant(openai_key, mcp_url):
    """Create the global assistant from the given credentials.

    Returns a human-readable status string starting with "✅" on success or
    "❌" on failure.  Never raises: chat_interface() relies on inspecting
    this string to decide whether initialization succeeded.
    """
    global assistant
    if not openai_key or not mcp_url:
        return "❌ Provide valid OpenAI API key and MCP URL"
    try:
        mcp_client = MCPClient(mcp_url)
        assistant = AIAssistant(openai_key, mcp_client)
        await assistant.initialize()
        return f"✅ Initialized with {len(assistant.available_tools)} tools"
    except Exception as e:
        # Report instead of raising so a bad URL/key cannot crash the
        # Gradio callback that invoked us.
        return f"❌ Failed to initialize: {str(e)}"
 
 
 
 
 
 
 
 
86
 
87
def chat_interface(message, history, openai_key, mcp_url):
    """Gradio event handler: append the bot reply for *message* to *history*.

    Lazily initializes the global assistant on the first call using the
    credentials from the UI.  Always returns ``(history, "")`` — the empty
    string clears the input textbox.
    """
    global assistant
    if not assistant:
        init_result = asyncio.run(initialize_assistant(openai_key, mcp_url))
        if "❌" in init_result:
            history.append([message, init_result])
            return history, ""
    try:
        response, _ = asyncio.run(assistant.process_message(message))
        history.append([message, response])
    except Exception as e:
        # A failure here must not propagate into the UI event loop.
        history.append([message, f"❌ Error: {str(e)}"])
    return history, ""
98
+
99
# Gradio UI: single-column chat with inline credential fields.
with gr.Blocks() as demo:
    gr.Markdown("# 🤖 MCP Assistant")
    chatbot = gr.Chatbot(height=400)
    msg = gr.Textbox(placeholder="Ask me anything...")
    openai_key = gr.Textbox(type="password", label="OpenAI API Key")
    mcp_url = gr.Textbox(label="MCP Server URL")

    submit_btn = gr.Button("Send")
    submit_btn.click(chat_interface, [msg, chatbot, openai_key, mcp_url], [chatbot, msg])
    msg.submit(chat_interface, [msg, chatbot, openai_key, mcp_url], [chatbot, msg])

if __name__ == "__main__":
    # Guarded so importing this module (e.g. from tests or another app)
    # does not start the web server as a side effect.
    demo.launch()