davron04 commited on
Commit
08a6525
·
verified ·
1 Parent(s): aa75fe8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +27 -38
app.py CHANGED
@@ -1,17 +1,17 @@
1
  import os
2
  import gradio as gr
3
  import requests
4
- import inspect
5
  import pandas as pd
6
 
7
- from typing import TypedDict, Annotated
8
- from langgraph.graph.message import add_messages
9
- from langchain_core.messages import AnyMessage, HumanMessage, AIMessage, SystemMessage
10
- from langgraph.prebuilt import ToolNode
11
- from langgraph.graph import START, StateGraph
12
- from langgraph.prebuilt import tools_condition
13
  from langchain_openai import ChatOpenAI
14
- from langchain_community.tools import DuckDuckGoSearchRun
 
 
 
 
 
 
 
15
 
16
  # (Keep Constants as is)
17
  # --- Constants ---
@@ -28,45 +28,33 @@ Your final answer must include only the answer to the user's question, without a
28
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
29
 
30
  # Generate the AgentState and Agent graph
31
- class AgentState(TypedDict):
32
- messages: Annotated[list[AnyMessage], add_messages]
33
 
34
  class BasicAgent:
35
  def __init__(self):
36
  self.llm = ChatOpenAI(
37
  model="nvidia/nemotron-3-super-120b-a12b:free",
38
  base_url="https://openrouter.ai/api/v1",
39
- api_key=os.environ.get("OPENROUTER_API_KEY")
40
  )
41
- search_tool = DuckDuckGoSearchRun()
42
- tools = [search_tool]
43
- self.llm = self.llm.bind_tools(tools)
44
-
45
- builder = StateGraph(AgentState)
46
-
47
- builder.add_node("assistant", self.assistant)
48
- builder.add_node("tools", ToolNode(tools))
49
-
50
- builder.add_edge(START, "assistant")
51
- builder.add_conditional_edges(
52
- "assistant",
53
- tools_condition
54
  )
55
- builder.add_edge("tools", "assistant")
56
- self.agent = builder.compile()
57
  print("BasicAgent initialized.")
58
- def assistant(self, state: AgentState) -> dict:
59
- return {
60
- "messages": [self.llm.invoke(state["messages"])],
61
- }
62
  def __call__(self, question: str) -> str:
63
- print(f"Agent received question: {question}")
64
- messages = [SystemMessage(content=SYSTEM_PROMPT), HumanMessage(content=question)]
65
- initial_state = AgentState(messages=messages)
66
- response = self.agent.invoke(initial_state)
67
- final_answer = response['messages'][-1].content
68
- print(f"Agent's answer: {final_answer}")
69
- return final_answer
70
 
71
  def run_and_submit_all( profile: gr.OAuthProfile | None):
72
  """
@@ -122,13 +110,14 @@ def run_and_submit_all( profile: gr.OAuthProfile | None):
122
  results_log = []
123
  answers_payload = []
124
  print(f"Running agent on {len(questions_data)} questions...")
125
- for item in questions_data:
126
  task_id = item.get("task_id")
127
  question_text = item.get("question")
128
  if not task_id or question_text is None:
129
  print(f"Skipping item with missing task_id or question: {item}")
130
  continue
131
  try:
 
132
  submitted_answer = agent(question_text)
133
  answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
134
  results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": submitted_answer})
 
1
  import os
2
  import gradio as gr
3
  import requests
 
4
  import pandas as pd
5
 
 
 
 
 
 
 
6
  from langchain_openai import ChatOpenAI
7
+ from langchain.agents import create_agent
8
+ from langchain.messages import HumanMessage
9
+ from langchain.tools import tool
10
+ from typing import Dict, Any
11
+ from tavily import TavilyClient
12
+
13
+ from dotenv import load_dotenv
14
+ load_dotenv()
15
 
16
  # (Keep Constants as is)
17
  # --- Constants ---
 
28
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
29
 
30
  # Generate the AgentState and Agent graph
 
 
31
 
32
  class BasicAgent:
33
  def __init__(self):
34
  self.llm = ChatOpenAI(
35
  model="nvidia/nemotron-3-super-120b-a12b:free",
36
  base_url="https://openrouter.ai/api/v1",
37
+ api_key=os.getenv("OPENROUTER_API_KEY")
38
  )
39
+ self.tavily_client = TavilyClient(api_key=os.getenv("TAVILY_API_KEY"))
40
+ tools = [self.web_search]
41
+ self.agent = create_agent(
42
+ model=self.llm,
43
+ tools=tools,
44
+ system_prompt=SYSTEM_PROMPT
 
 
 
 
 
 
 
45
  )
 
 
46
  print("BasicAgent initialized.")
47
+ @tool
48
+ def web_search(self, query: str) -> Dict[str, Any]:
49
+ """Search the web for information"""
50
+ return self.tavily_client.search(query)
51
  def __call__(self, question: str) -> str:
52
+ user_prompt = HumanMessage(content=question)
53
+ response = self.agent.invoke(
54
+ {"messages": [user_prompt]}
55
+ )
56
+ answer = response['messages'][-1].content.strip()
57
+ return answer
 
58
 
59
  def run_and_submit_all( profile: gr.OAuthProfile | None):
60
  """
 
110
  results_log = []
111
  answers_payload = []
112
  print(f"Running agent on {len(questions_data)} questions...")
113
+ for i, item in enumerate(questions_data):
114
  task_id = item.get("task_id")
115
  question_text = item.get("question")
116
  if not task_id or question_text is None:
117
  print(f"Skipping item with missing task_id or question: {item}")
118
  continue
119
  try:
120
+ print(f"Answering question {i+1}/{len(questions_data)}")
121
  submitted_answer = agent(question_text)
122
  answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
123
  results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": submitted_answer})