Aya1610 committed on
Commit
f0ed782
·
verified ·
1 Parent(s): 9be756d

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +8 -20
agent.py CHANGED
@@ -3,7 +3,6 @@ from dotenv import load_dotenv
3
  from langgraph.graph import START, END, StateGraph, MessagesState
4
  from langgraph.prebuilt import tools_condition, ToolNode
5
  from langchain_google_genai import ChatGoogleGenerativeAI
6
- from langchain_groq import ChatGroq
7
  from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFaceEmbeddings
8
  from langchain_community.tools.tavily_search import TavilySearchResults
9
  from langchain_community.document_loaders import WikipediaLoader
@@ -190,11 +189,13 @@ def build_graph(provider: str = "openai"):
190
  # Define nodes
191
  def assistant(state: MessagesState):
192
  """Assistant node - generates responses"""
193
- messages = state["messages"]
194
  # Generate response using LLM
195
- response = llm_with_tools.invoke(messages)
196
  # Return new state with appended message
197
- return {"messages": messages + [response]}
 
 
198
  def retriever(state: MessagesState):
199
  """Retriever node - provides context from vector store"""
200
  messages = state["messages"]
@@ -215,26 +216,13 @@ def build_graph(provider: str = "openai"):
215
  builder = StateGraph(MessagesState)
216
 
217
  # Add nodes
218
- builder.add_node("retriever", retriever)
219
  builder.add_node("assistant", assistant)
220
  builder.add_node("tools", ToolNode(tools))
221
 
222
  # Set up edges
223
- builder.set_entry_point("retriever")
224
- builder.add_edge("retriever", "assistant")
225
-
226
- # Conditional tool usage
227
- builder.add_conditional_edges(
228
- "assistant",
229
- tools_condition,
230
- {
231
- "continue": "tools", # If tools needed
232
- "end": END # If no tools needed
233
- }
234
- )
235
-
236
- # After tools execute, go back to assistant
237
- builder.add_edge("tools", "assistant")
238
 
239
  # Compile graph
240
  return builder.compile()
 
3
  from langgraph.graph import START, END, StateGraph, MessagesState
4
  from langgraph.prebuilt import tools_condition, ToolNode
5
  from langchain_google_genai import ChatGoogleGenerativeAI
 
6
  from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFaceEmbeddings
7
  from langchain_community.tools.tavily_search import TavilySearchResults
8
  from langchain_community.document_loaders import WikipediaLoader
 
189
  # Define nodes
190
def assistant(state: MessagesState):
    """Assistant node: run the tool-bound LLM over the conversation so far.

    Args:
        state: Current graph state; ``state["messages"]`` holds the
            message history accumulated by the graph.

    Returns:
        A partial state update containing the model's response message.
        LangGraph's ``add_messages`` reducer on ``MessagesState`` appends
        it to the existing history, so we return only the new message.
    """
    # `llm_with_tools` is the chat model bound to the tool list in the
    # enclosing build_graph() scope.
    response = llm_with_tools.invoke(state["messages"])
    return {"messages": [response]}
197
+
198
+
199
  def retriever(state: MessagesState):
200
  """Retriever node - provides context from vector store"""
201
  messages = state["messages"]
 
216
  builder = StateGraph(MessagesState)
217
 
218
  # Add nodes
219
+ # builder.add_node("retriever", retriever)
220
  builder.add_node("assistant", assistant)
221
  builder.add_node("tools", ToolNode(tools))
222
 
223
  # Set up edges
224
+ builder.set_entry_point("assistant")
225
+ builder.set_finish_point("assistant")
 
 
 
 
 
 
 
 
 
 
 
 
 
226
 
227
  # Compile graph
228
  return builder.compile()