cevheri committed on
Commit
8a3b182
·
1 Parent(s): c6df9a6

initial commit

Browse files
Files changed (2) hide show
  1. app.py +9 -15
  2. mcp_client.py +12 -9
app.py CHANGED
@@ -7,11 +7,13 @@ from mcp_client import MCPClient
7
 
8
  logger = logging.getLogger(__name__)
9
 
 
10
  async def initialize_client():
11
  client = MCPClient()
12
  await client.initialize()
13
  return client
14
 
 
15
  def launch_ui():
16
  with gr.Blocks(title="MCP Chatbot UI", fill_height=True, fill_width=True) as demo:
17
  gr.Markdown("## Dnext Product Catalog AI Assistant")
@@ -19,9 +21,7 @@ def launch_ui():
19
 
20
  chatbot = gr.Chatbot(height=600, label="Chatbot", type="messages")
21
  msg = gr.Textbox(
22
- label="Enter your request",
23
- placeholder="Type your message here...",
24
- lines=2
25
  )
26
  submit_btn = gr.Button("Submit")
27
  clear_btn = gr.Button("Clear Chat")
@@ -30,32 +30,26 @@ def launch_ui():
30
  bot_response = await client.invoke(user_message)
31
  return [
32
  {"role": "user", "content": user_message},
33
- {"role": "assistant", "content": bot_response}
34
  ]
35
 
36
  def clear_chat():
37
  return [], []
38
 
39
- submit_btn.click(
40
- fn=respond,
41
- inputs=[msg],
42
- outputs=[chatbot]
43
- )
44
-
45
- clear_btn.click(
46
- fn=clear_chat,
47
- inputs=[],
48
- outputs=[chatbot, msg]
49
- )
50
 
51
  return demo
52
 
 
53
  async def main():
54
  global client
55
  client = await initialize_client()
56
  demo = launch_ui()
57
  demo.launch(share=False)
58
 
 
59
  if __name__ == "__main__":
60
  asyncio.run(main())
61
 
 
7
 
8
  logger = logging.getLogger(__name__)
9
 
10
+
11
  async def initialize_client():
12
  client = MCPClient()
13
  await client.initialize()
14
  return client
15
 
16
+
17
  def launch_ui():
18
  with gr.Blocks(title="MCP Chatbot UI", fill_height=True, fill_width=True) as demo:
19
  gr.Markdown("## Dnext Product Catalog AI Assistant")
 
21
 
22
  chatbot = gr.Chatbot(height=600, label="Chatbot", type="messages")
23
  msg = gr.Textbox(
24
+ label="Enter your request", placeholder="Type your message here...", lines=2
 
 
25
  )
26
  submit_btn = gr.Button("Submit")
27
  clear_btn = gr.Button("Clear Chat")
 
30
  bot_response = await client.invoke(user_message)
31
  return [
32
  {"role": "user", "content": user_message},
33
+ {"role": "assistant", "content": bot_response},
34
  ]
35
 
36
  def clear_chat():
37
  return [], []
38
 
39
+ submit_btn.click(fn=respond, inputs=[msg], outputs=[chatbot])
40
+
41
+ clear_btn.click(fn=clear_chat, inputs=[], outputs=[chatbot, msg])
 
 
 
 
 
 
 
 
42
 
43
  return demo
44
 
45
+
46
  async def main():
47
  global client
48
  client = await initialize_client()
49
  demo = launch_ui()
50
  demo.launch(share=False)
51
 
52
+
53
  if __name__ == "__main__":
54
  asyncio.run(main())
55
 
mcp_client.py CHANGED
@@ -93,19 +93,22 @@ class MCPClient:
93
  )
94
  logger.info("Invoking agent...")
95
  config = {"configurable": {"thread_id": "conversation_123"}}
96
- result = await self.agent.ainvoke(input={"messages": input_messages}, config=config)
 
 
97
  logger.info(f"Agent result: {result}")
98
  logger.info("========================================================")
99
  last_message = result["messages"][-1]
100
  logger.info(f"Last message: {last_message.content}")
101
  return last_message.content
102
 
103
- # Agent result: {'messages':
 
104
  # [HumanMessage(content='hi', additional_kwargs={}, response_metadata={}, id='205d9484-c4f0-4e9e-962f-218d2e82bc03'),
105
- # AIMessage(content='This is just a greeting, so no API call is required. If you have any tasks or requests related to product catalog operations, please let me know how I can assist you!',
106
- # additional_kwargs={'refusal': None},
107
- # response_metadata={'token_usage': {'completion_tokens': 37, 'prompt_tokens': 27124, 'total_tokens': 27161,
108
- # 'completion_tokens_details': {'accepted_prediction_tokens': 0, 'audio_tokens': 0, 'reasoning_tokens': 0, 'rejected_prediction_tokens': 0},
109
- # 'prompt_tokens_details': {'audio_tokens': 0, 'cached_tokens': 27008}}, 'model_name': 'gpt-4.1-2025-04-14',
110
- # 'system_fingerprint': 'fp_799e4ca3f1', 'id': 'chatcmpl-BejXD72NDlc9UqrJiobIA6tQbuNaj', 'service_tier': 'default', 'finish_reason': 'stop', 'logprobs': None}, id='run--dee9f47d-99f9-40a6-b8c7-241668e6ac38-0',
111
- # usage_metadata={'input_tokens': 27124, 'output_tokens': 37, 'total_tokens': 27161, 'input_token_details': {'audio': 0, 'cache_read': 27008}, 'output_token_details': {'audio': 0, 'reasoning': 0}})]}
 
93
  )
94
  logger.info("Invoking agent...")
95
  config = {"configurable": {"thread_id": "conversation_123"}}
96
+ result = await self.agent.ainvoke(
97
+ input={"messages": input_messages}, config=config
98
+ )
99
  logger.info(f"Agent result: {result}")
100
  logger.info("========================================================")
101
  last_message = result["messages"][-1]
102
  logger.info(f"Last message: {last_message.content}")
103
  return last_message.content
104
 
105
+
106
+ # Agent result: {'messages':
107
  # [HumanMessage(content='hi', additional_kwargs={}, response_metadata={}, id='205d9484-c4f0-4e9e-962f-218d2e82bc03'),
108
+ # AIMessage(content='This is just a greeting, so no API call is required. If you have any tasks or requests related to product catalog operations, please let me know how I can assist you!',
109
+ # additional_kwargs={'refusal': None},
110
+ # response_metadata={'token_usage': {'completion_tokens': 37, 'prompt_tokens': 27124, 'total_tokens': 27161,
111
+ # 'completion_tokens_details': {'accepted_prediction_tokens': 0, 'audio_tokens': 0, 'reasoning_tokens': 0, 'rejected_prediction_tokens': 0},
112
+ # 'prompt_tokens_details': {'audio_tokens': 0, 'cached_tokens': 27008}}, 'model_name': 'gpt-4.1-2025-04-14',
113
+ # 'system_fingerprint': 'fp_799e4ca3f1', 'id': 'chatcmpl-BejXD72NDlc9UqrJiobIA6tQbuNaj', 'service_tier': 'default', 'finish_reason': 'stop', 'logprobs': None}, id='run--dee9f47d-99f9-40a6-b8c7-241668e6ac38-0',
114
+ # usage_metadata={'input_tokens': 27124, 'output_tokens': 37, 'total_tokens': 27161, 'input_token_details': {'audio': 0, 'cache_read': 27008}, 'output_token_details': {'audio': 0, 'reasoning': 0}})]}