Spaces:
Sleeping
Sleeping
| from typing import TypedDict, Annotated | |
| from tool import add, substract, multiply, divide, DuckDuckGoSearchTool, WikipediaSearchTool, ArxivSearchTool, PubmedSearchTool | |
| from os import getenv | |
| from langgraph.graph.message import add_messages | |
| from langchain_core.messages import AnyMessage, SystemMessage, HumanMessage, AIMessage | |
| from langgraph.graph import StateGraph, START, END, MessagesState | |
| from langgraph.prebuilt import ToolNode, tools_condition | |
| from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace | |
# Hugging Face Inference API token (None when the env var is unset).
HUGGINGFACEHUB_API_TOKEN = getenv("HUGGINGFACEHUB_API_TOKEN")

# Making the agent.
# NOTE(review): `repo_id` must be a plain "namespace/name" repository id —
# the previous full Inference-API URL fails HuggingFaceEndpoint's repo-id
# validation.  Pass `endpoint_url=` instead if a dedicated endpoint URL is
# ever needed.  The token is now forwarded so authenticated calls work.
llm = HuggingFaceEndpoint(
    repo_id="Meta-DeepLearning/llama-2-7b-chat-hf",
    huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
)
chat = ChatHuggingFace(llm=llm, verbose=True)

# Tools the agent may call: basic arithmetic plus several search backends.
# ("substract" keeps the spelling used by the `tool` module it comes from.)
tools = [
    add,
    substract,
    multiply,
    divide,
    DuckDuckGoSearchTool,
    WikipediaSearchTool,
    ArxivSearchTool,
    PubmedSearchTool,
]

# Chat model with the tool schemas bound so it can emit structured tool calls.
chat_with_tools = chat.bind_tools(tools)
def simple_graph():
    """Build and compile the tool-calling agent graph.

    The graph alternates between an LLM "assistant" node and a "tools" node:
    the assistant may emit tool calls, ``tools_condition`` routes execution to
    the tool node when it does (and to END otherwise), and tool results loop
    back to the assistant.

    Returns:
        The compiled LangGraph runnable.
    """

    def assistant(state: MessagesState):
        """Assistant node: ask the tool-bound chat model for the next message."""
        # Return ONLY the new message: MessagesState uses the `add_messages`
        # reducer, which appends/merges for us.  Re-sending the whole history
        # (the previous behaviour) was redundant and relied on id-based
        # de-duplication to avoid duplicating the conversation.
        return {"messages": [chat_with_tools.invoke(state["messages"])]}

    # Build graph / nodes
    builder = StateGraph(MessagesState)
    builder.add_node("assistant", assistant)    # LLM step
    builder.add_node("tools", ToolNode(tools))  # tool-execution step

    # Logic / edges
    builder.add_edge(START, "assistant")
    # Route to "tools" when the last AI message carries tool calls, else END.
    builder.add_conditional_edges("assistant", tools_condition)
    builder.add_edge("tools", "assistant")

    return builder.compile()