akashraut committed on
Commit
8b76481
·
verified ·
1 Parent(s): 7526f3a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +54 -45
app.py CHANGED
@@ -1,33 +1,44 @@
 
 
1
  import gradio as gr
2
  import asyncio, json, os, sys
3
  from dotenv import load_dotenv
4
  from langchain_core.tools import Tool
5
- from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
6
- from langchain_core.messages import HumanMessage, AIMessage
7
- from langchain.agents import initialize_agent, AgentType
8
- from langchain.agents.agent_executor import AgentExecutor
9
  from langchain_groq import ChatGroq
 
10
 
11
  load_dotenv()
12
 
13
- # --- MCP Client ---
14
  class MCPClient:
 
 
15
  def __init__(self, command: str, args: list):
16
- self.process, self._lock, self._cmd, self._req_id = None, asyncio.Lock(), [command] + args, 0
17
-
 
 
 
18
  async def _send_request(self, method: str, params: dict = None) -> dict:
19
  async with self._lock:
20
  self._req_id += 1
21
- request = {"jsonrpc": "2.0", "method": method, "params": params or {}, "id": self._req_id}
 
 
 
 
 
22
  self.process.stdin.write(json.dumps(request).encode() + b'\n')
23
  await self.process.stdin.drain()
 
24
  while line := await self.process.stdout.readline():
25
  response = json.loads(line)
26
  if response.get("id") == self._req_id:
27
- if "error" in response: raise RuntimeError(f"Server error: {response['error']}")
 
28
  return response["result"]
29
  raise ConnectionError("Server process closed unexpectedly.")
30
-
31
  async def get_tools(self) -> list[Tool]:
32
  self.process = await asyncio.create_subprocess_exec(
33
  *self._cmd,
@@ -39,72 +50,70 @@ class MCPClient:
39
  Tool(
40
  name=s['name'],
41
  description=s['description'],
42
- func=None,
43
  coroutine=self._create_tool_coro(s['name']),
44
  args_schema=s['args_schema']
45
  )
46
  for s in tool_schemas
47
  ]
48
-
49
  def _create_tool_coro(self, tool_name: str):
50
  async def _tool_coro(tool_input):
51
- return await self._send_request("execute", {"tool_name": tool_name, "tool_args": tool_input})
 
 
 
52
  return _tool_coro
53
 
54
- # --- Agent Executor ---
 
55
  _agent_executor = None
56
 
57
  async def get_agent_executor():
 
58
  global _agent_executor
59
  if _agent_executor is None:
60
  if not os.getenv("GROQ_API_KEY"):
61
- raise ValueError("GROQ_API_KEY not set.")
62
  client = MCPClient(command=sys.executable, args=["server.py"])
63
  tools = await client.get_tools()
64
- model = ChatGroq(model="llama3-groq-70b-8192-tool-use-preview", temperature=0)
65
-
66
- prompt = ChatPromptTemplate.from_messages([
67
- ("system", "You are a helpful assistant for gold and silver price information and forecasts."),
68
- MessagesPlaceholder(variable_name="chat_history", optional=True),
69
- ("human", "{input}"),
70
- MessagesPlaceholder(variable_name="agent_scratchpad"),
71
- ])
72
-
73
- # 🔧 only this line changed from create_openai_functions_agent(...)
74
- agent = create_tool_calling_agent(llm=model, tools=tools, prompt=prompt)
75
-
76
- _agent_executor = AgentExecutor(
77
- agent=agent,
78
- tools=tools,
79
- verbose=False,
80
- handle_parsing_errors=True,
81
- max_iterations=5,
82
- )
83
  return _agent_executor
84
 
85
- # --- Chat Handler ---
 
86
  async def respond_to_chat(message: str, history: list):
87
  agent = await get_agent_executor()
88
- chat_history = [
89
- msg
90
- for human, ai in history
91
- for msg in (HumanMessage(content=human), AIMessage(content=ai))
92
- ]
 
 
 
 
93
  try:
94
- response = await agent.ainvoke({"input": message, "chat_history": chat_history})
95
- return response["output"]
96
  except Exception as e:
97
  print(f"ERROR: {e}", file=sys.stderr)
98
  return "Sorry, an error occurred while processing your request."
99
 
100
- # --- Gradio UI ---
 
101
  demo = gr.ChatInterface(
102
  fn=respond_to_chat,
103
  title="Gold & Silver AI Forecast",
104
  description="Ask about live prices and future forecasts for gold and silver.",
105
- examples=["What's the price of silver today?", "Give me a 5-day forecast for gold."],
 
 
 
106
  )
107
 
 
 
108
  if __name__ == "__main__":
109
  demo.launch()
110
-
 
1
+ # app.py
2
+
3
  import gradio as gr
4
  import asyncio, json, os, sys
5
  from dotenv import load_dotenv
6
  from langchain_core.tools import Tool
 
 
 
 
7
  from langchain_groq import ChatGroq
8
+ from langgraph.prebuilt import create_react_agent
9
 
10
  load_dotenv()
11
 
12
+ # --- Compact MCP Client ---
13
  class MCPClient:
14
+ """Manages and communicates with a single tool server subprocess."""
15
+
16
  def __init__(self, command: str, args: list):
17
+ self.process: asyncio.subprocess.Process = None
18
+ self._lock = asyncio.Lock()
19
+ self._cmd = [command] + args
20
+ self._req_id = 0
21
+
22
    async def _send_request(self, method: str, params: dict = None) -> dict:
        """Send one JSON-RPC 2.0 request to the tool-server subprocess and return its result.

        Args:
            method: JSON-RPC method name (e.g. "execute").
            params: Optional params object; an empty dict is sent when None.

        Returns:
            The "result" member of the matching response.

        Raises:
            RuntimeError: if the server replies with an "error" member.
            ConnectionError: if stdout closes before a matching response arrives.
        """
        # NOTE(review): assumes get_tools() already started self.process —
        # calling this first would fail on the None process. Verify call order.
        # The lock serializes the entire write/read round-trip so concurrent
        # callers cannot interleave frames on the single stdio pipe.
        async with self._lock:
            self._req_id += 1
            request = {
                "jsonrpc": "2.0",
                "method": method,
                "params": params or {},
                "id": self._req_id,
            }
            # Line-delimited JSON framing: one newline-terminated request.
            self.process.stdin.write(json.dumps(request).encode() + b'\n')
            await self.process.stdin.drain()

            # Scan stdout until the response whose id matches ours appears;
            # lines with other ids are silently discarded.
            while line := await self.process.stdout.readline():
                response = json.loads(line)
                if response.get("id") == self._req_id:
                    if "error" in response:
                        raise RuntimeError(f"Server error: {response['error']}")
                    return response["result"]
            # readline() returned b'' — the child closed its stdout (likely exited).
            raise ConnectionError("Server process closed unexpectedly.")
41
+
42
  async def get_tools(self) -> list[Tool]:
43
  self.process = await asyncio.create_subprocess_exec(
44
  *self._cmd,
 
50
  Tool(
51
  name=s['name'],
52
  description=s['description'],
53
+ func=None, # <-- CRITICAL FIX: Add func=None for async tools
54
  coroutine=self._create_tool_coro(s['name']),
55
  args_schema=s['args_schema']
56
  )
57
  for s in tool_schemas
58
  ]
59
+
60
  def _create_tool_coro(self, tool_name: str):
61
  async def _tool_coro(tool_input):
62
+ return await self._send_request(
63
+ "execute",
64
+ {"tool_name": tool_name, "tool_args": tool_input}
65
+ )
66
  return _tool_coro
67
 
68
+
69
# --- Global Agent Executor ---
_agent_executor = None  # lazily-built process-wide singleton


async def get_agent_executor():
    """Build (at most once) and return the shared ReAct agent executor.

    Raises:
        ValueError: when the GROQ_API_KEY environment variable is missing.
    """
    global _agent_executor

    # Guard clause: reuse the already-built agent on every later call.
    if _agent_executor is not None:
        return _agent_executor

    if not os.getenv("GROQ_API_KEY"):
        raise ValueError("GROQ_API_KEY secret not set.")

    # Spawn the tool server and collect its tools over stdio.
    client = MCPClient(command=sys.executable, args=["server.py"])
    tools = await client.get_tools()

    model = ChatGroq(model="openai/gpt-oss-20b")
    _agent_executor = create_react_agent(model, tools)
    return _agent_executor
83
 
84
# --- Gradio Chat Logic ---
async def respond_to_chat(message: str, history: list):
    """Gradio chat callback: run the agent over the conversation and reply.

    Args:
        message: The newest user utterance.
        history: Prior turns as (user, assistant) string pairs.

    Returns:
        The agent's final answer, or a generic apology on any failure.
    """
    agent = await get_agent_executor()

    # Flatten (user, assistant) pairs into role-tagged message dicts,
    # then append the newest user message.
    messages = [
        {"role": role, "content": text}
        for user_turn, bot_turn in history
        for role, text in (("user", user_turn), ("assistant", bot_turn))
    ]
    messages.append({"role": "user", "content": message})

    try:
        result = await agent.ainvoke({"messages": messages})
        return result['messages'][-1].content
    except Exception as e:
        # Boundary handler: log the failure but keep the UI responsive.
        print(f"ERROR: {e}", file=sys.stderr)
        return "Sorry, an error occurred while processing your request."
103
 
104
# --- Gradio User Interface ---
# Wires respond_to_chat in as the (async) chat handler; Gradio awaits it.
demo = gr.ChatInterface(
    fn=respond_to_chat,
    title="Gold & Silver AI Forecast",
    description="Ask about live prices and future forecasts for gold and silver.",
    examples=[
        "What's the price of silver today?",
        "Give me a 5-day forecast for gold."
    ]
)


# --- Start the App ---
if __name__ == "__main__":
    demo.launch()  # blocks: serves the app on Gradio's default host/port