from typing import TypedDict, Annotated
from tool import (add,
substract,
multiply,
divide,
DuckDuckGoSearchTool,
TavilySearchTool,
WikipediaSearchTool,
ArxivSearchTool,
PubmedSearchTool,
save_and_read_file,
download_file_from_url,
extract_text_from_image,
analyze_csv_file,
analyze_excel_file,
extract_video_id,
get_youtube_transcript)
import os
from os import getenv
from langgraph.graph.message import add_messages
from langchain_core.messages import AnyMessage, SystemMessage, HumanMessage, AIMessage
from langgraph.graph import StateGraph, START, END, MessagesState
from langgraph.prebuilt import ToolNode, tools_condition
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace, HuggingFaceEmbeddings
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_groq import ChatGroq
from langchain_community.vectorstores import SupabaseVectorStore
from langchain.tools.retriever import create_retriever_tool
from supabase.client import Client, create_client
# API credentials and endpoints, read once from the environment at import time.
# Each value is None when the corresponding variable is unset.
# (Unified on os.environ.get; the original mixed `getenv` and `os.environ.get`.)
HUGGINGFACEHUB_API_TOKEN = os.environ.get("HUGGINGFACEHUB_API_TOKEN")
SUPABASE_URL = os.environ.get("SUPABASE_URL")
SUPABASE_SERVICE_ROLE_KEY = os.environ.get("SUPABASE_SERVICE_ROLE_KEY")
TAVILY_API_KEY = os.environ.get("TAVILY_API_KEY")
# Read the system prompt text from disk; this file must sit next to the script.
with open("prompt.txt", mode="r", encoding="utf-8") as prompt_file:
    system_prompt = prompt_file.read()

# Wrap the raw prompt as a SystemMessage so it can be prepended to every
# conversation handed to the model.
sys_msg = SystemMessage(content=system_prompt)
# Chat model backing the assistant node.
# temperature=0 requests deterministic (greedy) generation.
chat = ChatGoogleGenerativeAI(model="gemini-1.5-pro", temperature=0)
# Every tool exposed to the model, all imported from the local `tool` module.
# Order is preserved from the original declaration.
tools = [
    # arithmetic
    add, substract, multiply, divide,
    # search / knowledge lookups
    DuckDuckGoSearchTool, TavilySearchTool, WikipediaSearchTool,
    ArxivSearchTool, PubmedSearchTool,
    # file and media handling
    save_and_read_file, download_file_from_url, extract_text_from_image,
    analyze_csv_file, analyze_excel_file,
    # YouTube helpers
    extract_video_id, get_youtube_transcript,
]

# Model wrapper that can emit structured tool calls for any tool above.
chat_with_tools = chat.bind_tools(tools)
def simple_graph():
    """Build and compile the assistant/tools LangGraph.

    Flow: START -> assistant -> (tools_condition routes to "tools" or END);
    tool results loop back to the assistant until the model answers without
    requesting a tool call.

    Returns:
        The compiled graph, invokable with a MessagesState-shaped payload.
    """
    def assistant(state: MessagesState):
        """Assistant node: prepend the system prompt and call the tool-bound model."""
        return {"messages": [chat_with_tools.invoke([sys_msg] + state["messages"])]}

    # Wire the nodes and edges. (A previous retriever node that injected a
    # similar Q/A example from a vector store was removed as dead code.)
    builder = StateGraph(MessagesState)
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(tools))

    builder.add_edge(START, "assistant")
    # tools_condition inspects the assistant's last message: tool calls go to
    # the "tools" node, plain answers end the run.
    builder.add_conditional_edges("assistant", tools_condition)
    builder.add_edge("tools", "assistant")

    return builder.compile()