itskavya committed on
Commit
f9d0203
·
verified ·
1 Parent(s): 082b5a4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +28 -8
app.py CHANGED
@@ -7,12 +7,27 @@ from typing import TypedDict, Optional, Annotated
7
  from langchain_core.messages import AnyMessage
8
  from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
9
  from langgraph.graph.message import add_messages
 
 
 
 
10
 
 
 
 
 
 
11
 
12
  class State(TypedDict):
13
  input_file_path = Optional[str]
14
  messages = Annotated[list[AnyMessage], add_messages]
15
 
 
 
 
 
 
 
16
  # (Keep Constants as is)
17
  # --- Constants ---
18
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
@@ -21,17 +36,22 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
21
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
22
  class BasicAgent:
23
  def __init__(self):
24
- llm = HuggingFaceEndpoint(repo_id="Qwen/Qwen2.5-Coder-32B-Instruct")
25
- chat = ChatHuggingFace(llm=llm, verbose=True)
26
- messages = [("system", "You are a helpful assistant."),
27
- ("human", "Hey how are you?")]
28
- print(chat.invoke(messages))
 
 
29
  print("BasicAgent initialized.")
30
  def __call__(self, question: str) -> str:
31
  print(f"Agent received question (first 50 chars): {question[:50]}...")
32
- fixed_answer = "This is a default answer."
33
- print(f"Agent returning fixed answer: {fixed_answer}")
34
- return fixed_answer
 
 
 
35
 
36
  def run_and_submit_all( profile: gr.OAuthProfile | None):
37
  """
 
7
from langchain_core.messages import AnyMessage, HumanMessage, SystemMessage
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain_hyperbrowser import HyperbrowserBrowserUseTool
from langgraph.graph import START, StateGraph
from langgraph.graph.message import add_messages
from langgraph.prebuilt import ToolNode, tools_condition
14
 
15
+ browser_tool = HyperbrowserBrowserUseTool()
16
+ tools = [browser_tool]
17
+ llm = HuggingFaceEndpoint(repo_id="Qwen/Qwen2.5-Coder-32B-Instruct")
18
+ chat = ChatHuggingFace(llm=llm)
19
+ llm_with_tools = chat.bind_tools(tools, parallel_tool_calls=False)
20
 
21
  class State(TypedDict):
22
  input_file_path = Optional[str]
23
  messages = Annotated[list[AnyMessage], add_messages]
24
 
25
+ def assistant(state:State):
26
+ system_message = "You are a helpful assistant. Your job is to answer the questions asked of you as accurately as possible. You have access to a browser search tool, which you may use when needed to answer a question."
27
+ return {
28
+ "messages": [llm_with_tools.invoke([system_message] + state["messages"])]
29
+ }
30
+
31
  # (Keep Constants as is)
32
  # --- Constants ---
33
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
 
36
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
37
  class BasicAgent:
38
  def __init__(self):
39
+ workflow = StateGraph(state)
40
+ workflow.add_node("assistant", assistant)
41
+ workflow.add_node("tools", ToolNode([tools]))
42
+ workflow.add_edge(START, "assistant")
43
+ workflow.add_conditional_edge("assistant", tools_condition)
44
+ workflow.add_edge("tools", "assistant")
45
+ app = workflow.compile()
46
  print("BasicAgent initialized.")
47
  def __call__(self, question: str) -> str:
48
  print(f"Agent received question (first 50 chars): {question[:50]}...")
49
+ # fixed_answer = "This is a default answer."
50
+ messages = [HumanMessage(question)]
51
+ answer = app.invoke({"messages": messages})
52
+ # print(f"Agent returning fixed answer: {fixed_answer}")
53
+ print(f"Agent returning fixed answer: {answer}")
54
+ return answer
55
 
56
  def run_and_submit_all( profile: gr.OAuthProfile | None):
57
  """