Frazer2810 committed on
Commit
1f31a19
·
verified ·
1 Parent(s): 759a928

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +20 -22
agent.py CHANGED
@@ -1,4 +1,4 @@
1
- """LangGraph Agent – retry 5s, 30s, 60s; senza Supabase"""
2
 
3
  import os
4
  import time
@@ -20,7 +20,7 @@ from langchain_core.tools import tool
20
  load_dotenv()
21
 
22
  # --------------------------------------------------------------------------- #
23
- # TOOLS #
24
  # --------------------------------------------------------------------------- #
25
  @tool
26
  def multiply(a: int, b: int) -> int:
@@ -94,37 +94,38 @@ tools = [
94
  # --------------------------------------------------------------------------- #
95
  # Retry parameters #
96
  # --------------------------------------------------------------------------- #
97
- RETRY_DELAYS = [0, 5, 30, 60] # 4 tentativi complessivi
98
  MAX_ATTEMPTS = len(RETRY_DELAYS)
99
 
100
  # --------------------------------------------------------------------------- #
101
  # Build LangGraph #
102
  # --------------------------------------------------------------------------- #
103
  def build_graph(provider: str = "groq"):
104
- """Return a LangGraph graph with custom retry logic."""
105
 
106
  # ----------- LLM selection -------------------------------------------- #
107
  if provider == "google":
108
- llm_selected = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
109
 
110
  elif provider == "groq":
111
- llm_selected = ChatGroq(
112
  model="qwen-qwq-32b",
113
  temperature=0,
114
- max_retries=0, # gestiamo noi i retry
115
  )
116
 
117
  elif provider == "huggingface":
118
- llm_selected = ChatHuggingFace(
119
  llm=HuggingFaceEndpoint(
120
- url="https://api-inference.huggingface.co/models/Meta-DeepLearning/llama-2-7b-chat-hf",
 
121
  temperature=0,
122
  )
123
  )
124
  else:
125
  raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")
126
 
127
- llm_with_tools = llm_selected.bind_tools(tools)
128
 
129
  # ---------------- Retry wrapper -------------------------------------- #
130
  def invoke_with_retry(messages):
@@ -136,11 +137,10 @@ def build_graph(provider: str = "groq"):
136
  try:
137
  return llm_with_tools.invoke(messages)
138
  except Exception as e:
139
- err_text = str(e)
140
- if ("503" in err_text or "Service Unavailable" in err_text) and attempt < MAX_ATTEMPTS - 1:
141
  last_err = e
142
- continue # retry
143
- raise # altro errore o tentativi finiti
144
  raise last_err or RuntimeError("Unknown error during LLM invocation")
145
 
146
  # ---------------- Nodes ---------------------------------------------- #
@@ -164,12 +164,10 @@ def build_graph(provider: str = "groq"):
164
  # Stand-alone test #
165
  # --------------------------------------------------------------------------- #
166
  if __name__ == "__main__":
167
- graph = build_graph(provider="groq")
168
- question = (
169
- "When was a picture of St. Thomas Aquinas first added to the Wikipedia "
170
- "page on the Principle of double effect?"
171
- )
172
- msgs = [HumanMessage(content=question)]
173
- result = graph.invoke({"messages": msgs})
174
- for m in result["messages"]:
175
  m.pretty_print()
 
1
+ """LangGraph Agent – retry 5s, 30s, 60s no Supabase"""
2
 
3
  import os
4
  import time
 
20
  load_dotenv()
21
 
22
  # --------------------------------------------------------------------------- #
23
+ # TOOL DEFINITIONS #
24
  # --------------------------------------------------------------------------- #
25
  @tool
26
  def multiply(a: int, b: int) -> int:
 
94
  # --------------------------------------------------------------------------- #
95
  # Retry parameters #
96
  # --------------------------------------------------------------------------- #
97
+ RETRY_DELAYS = [0, 5, 30, 60] # seconds for attempts 0-3
98
  MAX_ATTEMPTS = len(RETRY_DELAYS)
99
 
100
  # --------------------------------------------------------------------------- #
101
  # Build LangGraph #
102
  # --------------------------------------------------------------------------- #
103
  def build_graph(provider: str = "groq"):
104
+ """Return a LangGraph graph with explicit retry logic."""
105
 
106
  # ----------- LLM selection -------------------------------------------- #
107
  if provider == "google":
108
+ llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
109
 
110
  elif provider == "groq":
111
+ llm = ChatGroq(
112
  model="qwen-qwq-32b",
113
  temperature=0,
114
+ max_retries=0, # we handle retries manually
115
  )
116
 
117
  elif provider == "huggingface":
118
+ llm = ChatHuggingFace(
119
  llm=HuggingFaceEndpoint(
120
+ url="https://api-inference.huggingface.co/models/"
121
+ "Meta-DeepLearning/llama-2-7b-chat-hf",
122
  temperature=0,
123
  )
124
  )
125
  else:
126
  raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")
127
 
128
+ llm_with_tools = llm.bind_tools(tools)
129
 
130
  # ---------------- Retry wrapper -------------------------------------- #
131
  def invoke_with_retry(messages):
 
137
  try:
138
  return llm_with_tools.invoke(messages)
139
  except Exception as e:
140
+ if ("503" in str(e) or "Service Unavailable" in str(e)) and attempt < MAX_ATTEMPTS - 1:
 
141
  last_err = e
142
+ continue
143
+ raise
144
  raise last_err or RuntimeError("Unknown error during LLM invocation")
145
 
146
  # ---------------- Nodes ---------------------------------------------- #
 
164
  # Stand-alone test #
165
  # --------------------------------------------------------------------------- #
166
  if __name__ == "__main__":
167
+ g = build_graph(provider="groq")
168
+ q = ("When was a picture of St. Thomas Aquinas first added to the Wikipedia "
169
+ "page on the Principle of double effect?")
170
+ msgs = [HumanMessage(content=q)]
171
+ res = g.invoke({"messages": msgs})
172
+ for m in res["messages"]:
 
 
173
  m.pretty_print()