from typing import TypedDict, Annotated
from tool import (add,
substract,
multiply,
divide,
DuckDuckGoSearchTool,
TavilySearchTool,
WikipediaSearchTool,
ArxivSearchTool,
PubmedSearchTool,
save_and_read_file,
download_file_from_url,
extract_text_from_image,
analyze_csv_file,
analyze_excel_file)
import os
from os import getenv
from langgraph.graph.message import add_messages
from langchain_core.messages import AnyMessage, SystemMessage, HumanMessage, AIMessage
from langgraph.graph import StateGraph, START, END, MessagesState
from langgraph.prebuilt import ToolNode, tools_condition
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace, HuggingFaceEmbeddings
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_groq import ChatGroq
from langchain_community.vectorstores import SupabaseVectorStore
from langchain.tools.retriever import create_retriever_tool
from supabase.client import Client, create_client
# --- Configuration -----------------------------------------------------------
# Credentials come from the environment. os.getenv returns None when a variable
# is unset; used consistently here instead of mixing `getenv` / `os.environ.get`
# (they are the same function — this is a consistency fix, not a behavior change).
HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
SUPABASE_URL = os.getenv("SUPABASE_URL")
SUPABASE_SERVICE_ROLE_KEY = os.getenv("SUPABASE_SERVICE_ROLE_KEY")

# Load the system prompt from disk once, at import time.
# NOTE(review): this raises FileNotFoundError if prompt.txt is absent —
# presumably intentional (the agent is useless without its prompt).
with open("prompt.txt", "r", encoding="utf-8") as f:
    system_prompt = f.read()
# --- Retrieval infrastructure ------------------------------------------------
# Sentence-transformer embeddings (all-mpnet-base-v2, dim=768) backing the
# Supabase vector store.
embeddings = HuggingFaceEmbeddings(
    model_name="sentence-transformers/all-mpnet-base-v2"
)

# Supabase client; requires SUPABASE_URL / SUPABASE_SERVICE_ROLE_KEY to be set.
supabase: Client = create_client(
    SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY
)

# Vector store over the "documents2" table, searched via the
# "match_documents_2" RPC function.
vector_store = SupabaseVectorStore(
    client=supabase,
    embedding=embeddings,
    table_name="documents2",
    query_name="match_documents_2",
)

# BUG FIX: the original assigned this tool instance to the name
# `create_retriever_tool`, shadowing the factory function imported from
# langchain.tools.retriever. Bound to a distinct name instead.
# NOTE(review): "Question Search" contains a space; several chat providers
# require tool names matching [a-zA-Z0-9_-]+ — confirm before ever binding
# this tool to a model (it is currently not in the `tools` list).
retriever_tool = create_retriever_tool(
    retriever=vector_store.as_retriever(),
    name="Question Search",
    description="A tool to retrieve similar questions from a vector store.",
)
# --- Model & tools -----------------------------------------------------------

# System message carrying the prompt loaded from prompt.txt.
sys_msg = SystemMessage(content=system_prompt)

# Gemini chat model, deterministic output (temperature=0).
chat = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)

# Every tool the assistant may call: arithmetic helpers, web/academic search
# backends, and file / image / spreadsheet utilities.
tools = [
    add,
    substract,
    multiply,
    divide,
    DuckDuckGoSearchTool,
    TavilySearchTool,
    WikipediaSearchTool,
    ArxivSearchTool,
    PubmedSearchTool,
    save_and_read_file,
    download_file_from_url,
    extract_text_from_image,
    analyze_csv_file,
    analyze_excel_file,
]

# Model wrapper capable of emitting tool calls for any of the above.
chat_with_tools = chat.bind_tools(tools)
def simple_graph():
    """Build and compile the agent graph: retriever -> assistant <-> tools.

    Returns:
        A compiled LangGraph runnable over ``MessagesState``.
    """

    def assistant(state: MessagesState):
        """Assistant node: invoke the tool-enabled model.

        BUG FIX: the system prompt is prepended at call time instead of being
        stored in state. ``MessagesState`` merges node output with the
        ``add_messages`` reducer, which updates existing messages in place
        (matched by id) and APPENDS messages with fresh ids — so the original
        retriever node's ``[sys_msg] + state["messages"] + ...`` return put
        the system message at the END of the history, not first.
        """
        response = chat_with_tools.invoke([sys_msg] + state["messages"])
        return {"messages": [response]}

    def retriever(state: MessagesState):
        """Retriever node: fetch a similar solved question as a few-shot hint.

        Only NEW messages are returned; the ``add_messages`` reducer keeps
        the existing conversation, so re-returning ``state["messages"]``
        (as the original did) is unnecessary.
        """
        similar_question = vector_store.similarity_search(state["messages"][0].content)
        if similar_question:  # non-empty hit list
            example_msg = HumanMessage(
                content=f"Here I provide a similar question and answer for reference: \n\n{similar_question[0].page_content}",
            )
            return {"messages": [example_msg]}
        # No similar question found: add nothing, leave the state unchanged.
        return {"messages": []}

    # Nodes
    builder = StateGraph(MessagesState)
    builder.add_node("retriever", retriever)
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(tools))

    # Edges: always retrieve first, then loop assistant <-> tools until the
    # model stops emitting tool calls (tools_condition then routes to END).
    builder.add_edge(START, "retriever")
    builder.add_edge("retriever", "assistant")
    builder.add_conditional_edges("assistant", tools_condition)
    builder.add_edge("tools", "assistant")
    return builder.compile()