mrhenu committed on
Commit
b91b6ec
·
verified ·
1 Parent(s): 0bae48f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -16
app.py CHANGED
@@ -5,12 +5,10 @@ import pandas as pd
5
  from typing import TypedDict, Annotated, Sequence
6
  import operator
7
  from langchain_core.messages import BaseMessage, HumanMessage
8
- from langchain.agents import AgentExecutor
9
  from langchain_community.tools import DuckDuckGoSearchRun
10
- from langchain_huggingface import HuggingFaceEndpoint
11
  from langgraph.graph import StateGraph, END
12
  from langgraph.prebuilt import ToolNode, tools_condition
13
- from langchain_core.prompts import ChatPromptTemplate
14
 
15
  # --- Main Application Logic ---
16
 
@@ -22,30 +20,31 @@ class AgentState(TypedDict):
22
  def create_langgraph_agent():
23
  print("Initializing LangGraph Agent...")
24
 
25
- # 1. Set up the LLM (The "Brain") using the DeepSeek Coder model
26
- llm = HuggingFaceEndpoint(
27
  repo_id="deepseek-ai/deepseek-coder-6.7b-instruct",
28
  task="conversational",
29
- max_new_tokens=1024, # Increased tokens for better reasoning
30
  do_sample=False,
31
  )
32
 
33
- # We use a wrapper to make the model compatible with LangChain's tool calling
34
- from langchain_huggingface.chat_models import HuggingFaceChat
35
- llm_with_tools = HuggingFaceChat(endpoint=llm).bind_tools([DuckDuckGoSearchRun()])
 
 
36
  print("LLM and tools initialized.")
37
 
38
- # 2. Define the agent's logic (the "agent" node)
39
- # We define the graph nodes and edges for the agent's reasoning process
40
  def agent_node(state):
41
  print("Calling agent node...")
42
- response = llm_with_tools.invoke(state["messages"])
43
  return {"messages": [response]}
44
 
45
- tool_node = ToolNode([DuckDuckGoSearchRun()])
46
  print("Graph nodes defined.")
47
 
48
- # 3. Define the Graph
49
  graph = StateGraph(AgentState)
50
  graph.add_node("agent", agent_node)
51
  graph.add_node("tools", tool_node)
@@ -54,7 +53,7 @@ def create_langgraph_agent():
54
  graph.add_conditional_edges("agent", tools_condition)
55
  graph.add_edge("tools", "agent")
56
 
57
- # 4. Compile the graph into a runnable app
58
  app = graph.compile()
59
  print("LangGraph agent compiled and ready.")
60
  return app
@@ -120,7 +119,7 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
120
 
121
  # Gradio Interface
122
  with gr.Blocks() as demo:
123
- gr.Markdown("# Agent Evaluation Runner (DeepSeek + DuckDuckGo)")
124
  gr.LoginButton()
125
  run_button = gr.Button("Run Evaluation & Submit All Answers")
126
  status_output = gr.Textbox(label="Run Status / Submission Result", lines=5, interactive=False)
 
5
  from typing import TypedDict, Annotated, Sequence
6
  import operator
7
  from langchain_core.messages import BaseMessage, HumanMessage
 
8
  from langchain_community.tools import DuckDuckGoSearchRun
9
+ from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
10
  from langgraph.graph import StateGraph, END
11
  from langgraph.prebuilt import ToolNode, tools_condition
 
12
 
13
  # --- Main Application Logic ---
14
 
 
20
  def create_langgraph_agent():
21
  print("Initializing LangGraph Agent...")
22
 
23
+ # 1. Set up the LLM Endpoint connection
24
+ llm_endpoint = HuggingFaceEndpoint(
25
  repo_id="deepseek-ai/deepseek-coder-6.7b-instruct",
26
  task="conversational",
27
+ max_new_tokens=1024,
28
  do_sample=False,
29
  )
30
 
31
+ # 2. Wrap the endpoint in the ChatHuggingFace class to make it a chat model
32
+ # and bind the tools to it.
33
+ tools = [DuckDuckGoSearchRun()]
34
+ chat_model = ChatHuggingFace(llm=llm_endpoint)
35
+ chat_model_with_tools = chat_model.bind_tools(tools)
36
  print("LLM and tools initialized.")
37
 
38
+ # 3. Define the agent's logic (the "agent" node)
 
39
  def agent_node(state):
40
  print("Calling agent node...")
41
+ response = chat_model_with_tools.invoke(state["messages"])
42
  return {"messages": [response]}
43
 
44
+ tool_node = ToolNode(tools)
45
  print("Graph nodes defined.")
46
 
47
+ # 4. Define the Graph
48
  graph = StateGraph(AgentState)
49
  graph.add_node("agent", agent_node)
50
  graph.add_node("tools", tool_node)
 
53
  graph.add_conditional_edges("agent", tools_condition)
54
  graph.add_edge("tools", "agent")
55
 
56
+ # 5. Compile the graph into a runnable app
57
  app = graph.compile()
58
  print("LangGraph agent compiled and ready.")
59
  return app
 
119
 
120
  # Gradio Interface
121
  with gr.Blocks() as demo:
122
+ gr.Markdown("# Agent Evaluation Runner (DeepSeek + LangGraph)")
123
  gr.LoginButton()
124
  run_button = gr.Button("Run Evaluation & Submit All Answers")
125
  status_output = gr.Textbox(label="Run Status / Submission Result", lines=5, interactive=False)