algorhythym committed on
Commit
b9a84a1
·
verified ·
1 Parent(s): 1f09009

Update server/client.py

Browse files
Files changed (1) hide show
  1. server/client.py +29 -72
server/client.py CHANGED
@@ -1,72 +1,29 @@
1
- import asyncio
2
-
3
- from dotenv import load_dotenv
4
- from langchain_groq import ChatGroq
5
-
6
- from mcp_use import MCPAgent, MCPClient
7
- import os
8
-
9
- async def run_memory_chat():
10
- """Run a chat using MCPAgent's built-in conversation memory."""
11
- # Load environment variables for API keys
12
- load_dotenv()
13
- os.environ["GROQ_API_KEY"]=os.getenv("GROQ_API_KEY")
14
-
15
- # Config file path - change this to your config file
16
- config_file = "server/weather.json"
17
-
18
- print("Initializing chat...")
19
-
20
- # Create MCP client and agent with memory enabled
21
- client = MCPClient.from_config_file(config_file)
22
- llm = ChatGroq(model="qwen-qwq-32b")
23
-
24
- # Create agent with memory_enabled=True
25
- agent = MCPAgent(
26
- llm=llm,
27
- client=client,
28
- max_steps=15,
29
- memory_enabled=True, # Enable built-in conversation memory
30
- )
31
-
32
- print("\n===== Interactive MCP Chat =====")
33
- print("Type 'exit' or 'quit' to end the conversation")
34
- print("Type 'clear' to clear conversation history")
35
- print("==================================\n")
36
-
37
- try:
38
- # Main chat loop
39
- while True:
40
- # Get user input
41
- user_input = input("\nYou: ")
42
-
43
- # Check for exit command
44
- if user_input.lower() in ["exit", "quit"]:
45
- print("Ending conversation...")
46
- break
47
-
48
- # Check for clear history command
49
- if user_input.lower() == "clear":
50
- agent.clear_conversation_history()
51
- print("Conversation history cleared.")
52
- continue
53
-
54
- # Get response from agent
55
- print("\nAssistant: ", end="", flush=True)
56
-
57
- try:
58
- # Run the agent with the user input (memory handling is automatic)
59
- response = await agent.run(user_input)
60
- print(response)
61
-
62
- except Exception as e:
63
- print(f"\nError: {e}")
64
-
65
- finally:
66
- # Clean up
67
- if client and client.sessions:
68
- await client.close_all_sessions()
69
-
70
-
71
- if __name__ == "__main__":
72
- asyncio.run(run_memory_chat())
 
1
+ import asyncio
2
+ from dotenv import load_dotenv
3
+ from langchain_groq import ChatGroq
4
+ from mcp_use import MCPAgent, MCPClient
5
+ import os
6
# Load environment variables from .env once at import time.
load_dotenv()

# os.environ values must be str: assigning the result of os.getenv() directly
# raises TypeError when GROQ_API_KEY is unset, so only mirror it when present.
_groq_key = os.getenv("GROQ_API_KEY")
if _groq_key is not None:
    os.environ["GROQ_API_KEY"] = _groq_key

# Build the MCP client and agent once at module load so the agent's built-in
# conversation memory persists across calls to run_memory_chat().
CONFIG_FILE = "server/weather.json"  # MCP server configuration file
client = MCPClient.from_config_file(CONFIG_FILE)
llm = ChatGroq(model="qwen-qwq-32b")

agent = MCPAgent(
    llm=llm,
    client=client,
    max_steps=15,          # cap agent tool-use iterations per question
    memory_enabled=True,   # keep conversation memory between questions
)
22
+
23
async def run_memory_chat(question: str) -> str:
    """Send one question to the shared MCPAgent and return its reply.

    Any exception raised while the agent runs is captured and reported
    back to the caller as an ``"Error: ..."`` string instead of
    propagating.
    """
    try:
        answer = await agent.run(question)
    except Exception as exc:
        return f"Error: {exc}"
    return answer