taha454 committed on
Commit
7a44ebd
·
verified ·
1 Parent(s): 9e18008

Upload agent.py

Browse files
Files changed (1) hide show
  1. agent.py +53 -27
agent.py CHANGED
@@ -46,9 +46,14 @@ def get_wiki_relate(state: InfoState) -> InfoState:
46
  # Make a tool from it
47
  wiki_tool = WikipediaQueryRun(api_wrapper=wiki)
48
 
49
- wiki_answer = wiki_tool.run(" ".join(state["main_parts"]))
50
- state['tool_answer'] = wiki_answer
51
- return state
 
 
 
 
 
52
 
53
 
54
  def execute_code(state: InfoState) -> InfoState :
@@ -96,17 +101,28 @@ def preprocess_text(state: dict) -> InfoState:
96
  """
97
  print("Preprocess text (Gemini)...")
98
  # 1️⃣ Prepare the prompt
99
- prompt = (
100
- "Extract the most important content words (nouns, proper names, key concepts) from this question that would help find the best-matching Wikipedia article. "
101
- "If the question is not in English; translate key terms to English for Wikipedia's English edition. "
102
- "Ignore stopwords (like 'who', 'is', 'the', 'of', 'in', 'current', 'what'), filler words, and typos. "
103
- "Focus on entities and topics that exist as Wikipedia page titles. "
104
- "Correct obvious spelling mistakes and expand common abbreviations if needed for better Wikipedia matching.\n\n"
105
- "Question: '" + state["question"] + "'\n\n"
106
- "Output ONLY a valid JSON list of 1–4 corrected, title-cased strings (e.g. [\"President of the United States\", \"Joe Biden\"]). "
107
- "No explanations, no markdown, no extra text, no quotes around words, no trailing commas."
 
 
 
 
 
 
 
 
 
 
108
  )
109
 
 
110
  # 2️⃣ Call Gemini
111
  model = ChatGoogleGenerativeAI(model="gemini-2.5-flash")
112
  response = model.invoke([HumanMessage(content=prompt)]).content.strip()
@@ -138,12 +154,19 @@ def get_answer(state: InfoState) -> InfoState :
138
  print("Getting Answer (Gemini)...")
139
 
140
  prompt = (
141
- "Answer the question based on the context below. "
142
- #"If the question cannot be answered using the information provided, answer with 'I don't know'. "
143
- "Question: " + state["question"] +
144
- "\nContext: " + str(state.get("tool_answer")) +
145
- "\nRewrite answer in clearer, simple way."
 
 
 
 
 
 
146
  )
 
147
  model = ChatGoogleGenerativeAI(model="gemini-2.5-flash")
148
  state["final_answer"] = (model.invoke([HumanMessage(content=prompt)]).content)
149
 
@@ -153,7 +176,7 @@ def get_type(state: InfoState) -> InfoState:
153
  """Choose which tool to use based on question type (WIKI, SEARCH, CODE)."""
154
  print("Getting Type (Gemini)...")
155
 
156
- prompt = "According to the Question " +state["question"] + " Select the best tool to answer WIKI if it's informatative or science question, WebInfo if it need up to data news,MATH if math or date or time You're very serious,just give one word from given"
157
  model = ChatGoogleGenerativeAI(model="gemini-2.5-flash")
158
  state["answer_type"] = (model.invoke([HumanMessage(content=prompt)]).content)
159
 
@@ -168,10 +191,12 @@ def get_search_results(state: InfoState) -> InfoState:
168
 
169
  search = DuckDuckGoSearchRun()
170
 
171
- # Simple text result
172
- state['tool_answer'] = search.run(state["question"]) #" " .join(state["main_parts"]))
173
-
174
- return state
 
 
175
 
176
 
177
  def route(state: InfoState):
@@ -185,7 +210,7 @@ def get_graph():
185
  graph = StateGraph(InfoState)
186
 
187
  # Add nodes
188
- graph.add_node("get_wiki_relate", get_wiki_relate)
189
  graph.add_node("preprocess_text", preprocess_text)
190
  graph.add_node("get_answer", get_answer)
191
  graph.add_node("get_type", get_type)
@@ -204,14 +229,15 @@ def get_graph():
204
  route,
205
  {
206
  "WebInfo": "get_search_results",
207
- "WIKI": "get_wiki_relate",
208
- "MATH": "get_code"
 
209
  }
210
  )
211
 
212
  # Add final edges
213
  graph.add_edge("get_search_results", "get_answer")
214
- graph.add_edge("get_wiki_relate", "get_answer")
215
 
216
  graph.add_edge("get_code", "execute_code")
217
  graph.add_edge("execute_code", "get_answer")
@@ -229,4 +255,4 @@ def ask(compiled_graph,question):
229
 
230
  })
231
 
232
- return legitimate_result['final_answer']
 
46
  # Make a tool from it
47
  wiki_tool = WikipediaQueryRun(api_wrapper=wiki)
48
 
49
+ try:
50
+ wiki_answer = wiki_tool.run(" ".join(state["main_parts"]) + " full wikipedia article about this topic")
51
+ state['tool_answer'] = wiki_answer
52
+ return state
53
+ except Exception as e:
54
+ print("Rate limit Exception")
55
+ state['tool_answer'] = ""
56
+ return state
57
 
58
 
59
  def execute_code(state: InfoState) -> InfoState :
 
101
  """
102
  print("Preprocess text (Gemini)...")
103
  # 1️⃣ Prepare the prompt
104
+ prompt = (
105
+ "We want to find the best-matching English Wikipedia pages for a factual question, "
106
+ "so we must extract only the essential topic names or entities that Wikipedia likely has pages for. "
107
+ "These should include the main subject (e.g., a person, event, place, or concept) and any directly relevant subtopic "
108
+ "(like 'Discography', 'Career', or 'History') if they help narrow the search.\n\n"
109
+
110
+ "Rules:\n"
111
+ "- Output 1 to 3 items maximum.\n"
112
+ "- Use English Wikipedia title format (capitalize each main word).\n"
113
+ "- Translate non-English names or terms to English.\n"
114
+ "- Exclude question words, pronouns, and filler terms.\n"
115
+ "- Fix spelling errors if necessary.\n"
116
+ "- Prefer specific Wikipedia topics over vague ones.\n\n"
117
+
118
+ "Example:\n"
119
+ "Q: 'Who built the Eiffel Tower?'\n"
120
+ "A: [\"Eiffel Tower\", \"Gustave Eiffel\"]\n\n"
121
+ f"Question: '{state['question']}'\n\n"
122
+ "Output ONLY a valid JSON list as described — no explanations, markdown, or extra formatting."
123
  )
124
 
125
+
126
  # 2️⃣ Call Gemini
127
  model = ChatGoogleGenerativeAI(model="gemini-2.5-flash")
128
  response = model.invoke([HumanMessage(content=prompt)]).content.strip()
 
154
  print("Getting Answer (Gemini)...")
155
 
156
  prompt = (
157
+ "You are a knowledgeable assistant that answers questions based on context and common factual knowledge.\n"
158
+ "Use the context first, but if it clearly lacks the needed details, you may rely on well-known public facts "
159
+ "(such as from Wikipedia) that logically complete the answer.\n\n"
160
+ f"Question: {state['question']}\n"
161
+ f"Context:\n{state.get('tool_answer')}\n\n"
162
+ "Instructions:\n"
163
+ "- Focus on producing one short factual answer.\n"
164
+ "- Do not include tool names, prefixes, or metadata.\n"
165
+ "- If the context contains partial hints, you can infer the answer from general knowledge of the same topic.\n"
166
+ "- If absolutely nothing is relevant, reply: I don't know.\n\n"
167
+ "Final Answer:"
168
  )
169
+
170
  model = ChatGoogleGenerativeAI(model="gemini-2.5-flash")
171
  state["final_answer"] = (model.invoke([HumanMessage(content=prompt)]).content)
172
 
 
176
  """Choose which tool to use based on question type (WIKI, SEARCH, CODE)."""
177
  print("Getting Type (Gemini)...")
178
 
179
+ prompt = "According to the Question " +state["question"] + " Select the best tool to answer WIKI if it's informatative or science question, WebInfo if it need up to data news,MATH if math or date or time,LLM if other or have link You're very serious,just give one word from given"
180
  model = ChatGoogleGenerativeAI(model="gemini-2.5-flash")
181
  state["answer_type"] = (model.invoke([HumanMessage(content=prompt)]).content)
182
 
 
191
 
192
  search = DuckDuckGoSearchRun()
193
 
194
+ try:
195
+ state['tool_answer'] = search.run(state["question"]) #" " .join(state["main_parts"]))
196
+ return state
197
+ except Exception:
198
+ state['tool_answer'] = ""
199
+ return state
200
 
201
 
202
  def route(state: InfoState):
 
210
  graph = StateGraph(InfoState)
211
 
212
  # Add nodes
213
+ #graph.add_node("get_wiki_relate", get_wiki_relate)
214
  graph.add_node("preprocess_text", preprocess_text)
215
  graph.add_node("get_answer", get_answer)
216
  graph.add_node("get_type", get_type)
 
229
  route,
230
  {
231
  "WebInfo": "get_search_results",
232
+ "WIKI": "get_search_results",#"get_wiki_relate",
233
+ "MATH": "get_code",
234
+ "LLM": "get_answer"
235
  }
236
  )
237
 
238
  # Add final edges
239
  graph.add_edge("get_search_results", "get_answer")
240
+ #graph.add_edge("get_wiki_relate", "get_answer")
241
 
242
  graph.add_edge("get_code", "execute_code")
243
  graph.add_edge("execute_code", "get_answer")
 
255
 
256
  })
257
 
258
+ return legitimate_result['final_answer'],legitimate_result