Umer797 committed on
Commit
a4a580c
·
verified ·
1 Parent(s): 70414f5

Update graph_builder.py

Browse files
Files changed (1) hide show
  1. graph_builder.py +8 -38
graph_builder.py CHANGED
@@ -1,59 +1,29 @@
1
  from langgraph.graph import StateGraph
2
- from langchain_core.tools import Tool
3
- from langchain_community.chat_models import ChatOpenAI
4
- from langchain_community.tools import TavilySearchResults
5
-
6
-
7
 
8
  def build_graph():
9
  graph = StateGraph(dict)
10
 
11
- # Setup tools
12
- search = TavilySearchResults()
13
-
14
-
15
- def search_step(state):
16
  question = state.get("question")
17
  if not question:
18
  raise ValueError("Missing 'question' in state")
19
- result = search.run(question)
20
- state["search_result"] = result
21
- return state
22
-
23
- def llm_step(state):
24
- llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0)
25
- question = state.get("question")
26
- search_info = state.get("search_result", "")
27
- prompt = f"""
28
- You are solving a GAIA benchmark question.
29
- Here’s the question:
30
- {question}
31
-
32
- Here’s retrieved web info:
33
- {search_info}
34
-
35
- ONLY return the final exact answer (no explanation, no prefix).
36
- """
37
- response = llm.invoke(prompt)
38
- state["llm_output"] = response.content.strip()
39
  return state
40
 
41
  def formatter_step(state):
42
  llm_output = state.get("llm_output")
43
  if not llm_output:
44
  raise ValueError("Missing 'llm_output' in state")
45
- cleaned = llm_output.strip()
46
- state["final_answer"] = cleaned
47
  return state
48
 
49
- # Add nodes
50
- graph.add_node("search", search_step)
51
  graph.add_node("llm", llm_step)
52
  graph.add_node("formatter", formatter_step)
53
-
54
- # Define flow
55
- graph.set_entry_point("search")
56
- graph.add_edge("search", "llm")
57
  graph.add_edge("llm", "formatter")
58
  graph.set_finish_point("formatter")
59
 
 
1
  from langgraph.graph import StateGraph
2
+ from llm_node import llm_node
3
+ from formatter_node import formatter_node
 
 
 
4
 
5
def build_graph():
    """Build the two-stage LangGraph pipeline: llm -> formatter.

    The graph state is a plain ``dict``. The ``llm`` node answers the
    question via the project's ``llm_node`` helper and the ``formatter``
    node post-processes that answer via ``formatter_node``.

    Returns:
        StateGraph: the wired (uncompiled) graph; callers compile/run it.
    """
    graph = StateGraph(dict)

    def llm_step(state):
        # Fail loudly on a malformed state rather than passing None downstream.
        question = state.get("question")
        if not question:
            raise ValueError("Missing 'question' in state")
        llm_output = llm_node(question)  # ✅ uses HuggingFaceHub now
        state["llm_output"] = llm_output
        return state

    def formatter_step(state):
        # Same guard style as llm_step: the upstream node must have run.
        llm_output = state.get("llm_output")
        if not llm_output:
            raise ValueError("Missing 'llm_output' in state")
        final_answer = formatter_node(llm_output)
        state["final_answer"] = final_answer
        return state

    graph.add_node("llm", llm_step)
    graph.add_node("formatter", formatter_step)
    graph.set_entry_point("llm")
    graph.add_edge("llm", "formatter")
    graph.set_finish_point("formatter")

    # NOTE(review): the final line of the new file (line 29) is truncated in
    # the diff rendering; a builder that returns nothing is unusable, so a
    # `return graph` is assumed here — confirm against the actual file.
    return graph