Spaces:
Sleeping
Sleeping
File size: 4,089 Bytes
553d849 9c40b53 893de1e 91cc21f 893de1e 4abd040 a4a2855 766670a 553d849 f944c3f 553d849 f91dabb 542758f e52d269 f91dabb 7fbe05a a1b0d20 f91dabb fde0325 553d849 766670a a4a2855 bd7464a 2c16de5 7b404af 8286586 7b404af 8286586 0f1bf73 e8ac6c9 b98127d e8ac6c9 8286586 afece27 a9a1f7c 553d849 cc43d95 91cc21f 553d849 893de1e b15a473 553d849 bfd13b1 39728ae e8ac6c9 bfd13b1 556ab31 e8ac6c9 bfd13b1 e8ac6c9 bfd13b1 8ecb459 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 |
from typing import TypedDict, Annotated
from tool import (add,
substract,
multiply,
divide,
DuckDuckGoSearchTool,
TavilySearchTool,
combined_web_search,
WikipediaSearchTool,
ArxivSearchTool,
PubmedSearchTool,
save_and_read_file,
download_file_from_url,
extract_text_from_image,
analyze_csv_file,
analyze_excel_file,
extract_video_id,
get_youtube_transcript)
import os
from os import getenv
from langgraph.graph.message import add_messages
from langchain_core.messages import AnyMessage, SystemMessage, HumanMessage, AIMessage
from langgraph.graph import StateGraph, START, END, MessagesState
from langgraph.prebuilt import ToolNode, tools_condition
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace, HuggingFaceEmbeddings
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_groq import ChatGroq
from langchain_community.vectorstores import SupabaseVectorStore
from langchain.tools.retriever import create_retriever_tool
from langchain_core.rate_limiters import InMemoryRateLimiter
from supabase.client import Client, create_client
import time
# --- Environment & prompt configuration ----------------------------------
# Every credential is read from the process environment; a missing variable
# yields None and is left for the consuming client to reject at call time.
HUGGINGFACEHUB_API_TOKEN = os.environ.get("HUGGINGFACEHUB_API_TOKEN")
SUPABASE_URL = os.environ.get("SUPABASE_URL")
SUPABASE_SERVICE_ROLE_KEY = os.environ.get("SUPABASE_SERVICE_ROLE_KEY")
TAVILY_API_KEY = os.environ.get("TAVILY_API_KEY")
GROQ_API_KEY = os.environ.get("GROQ_API_KEY")

# The agent's system prompt is kept in prompt.txt alongside this module.
with open("prompt.txt", encoding="utf-8") as f:
    system_prompt = f.read()

# Wrap the raw prompt once so every node can reuse the same SystemMessage.
sys_msg = SystemMessage(content=system_prompt)
# --- Retrieval backend ----------------------------------------------------
# Sentence-transformer embeddings (384-dimensional) backing the vector store.
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")

# Supabase client built from the service-role credentials read above.
supabase: Client = create_client(SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY)

# Vector store over the "documents" table, queried through the
# "match_documents_langchain" stored procedure.
vector_store = SupabaseVectorStore(
    client=supabase,
    embedding=embeddings,
    table_name="documents",
    query_name="match_documents_langchain",
)

# Tool wrapper around the retriever for similar-question lookup.
# NOTE(review): this tool is never added to the `tools` list below — confirm
# whether it is meant to be exposed to the model or only kept for manual use.
retriever_tool = create_retriever_tool(
    retriever=vector_store.as_retriever(),
    name="Question Search",
    description="A tool to retrieve similar questions from a vector store.",
)
# --- LLM and tool wiring --------------------------------------------------
# Gemini 2.0 Flash at temperature 0 for deterministic outputs.
chat = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)

# Every tool the model may call, all imported from the local `tool` module:
# arithmetic helpers, web/academic search, file and media utilities.
tools = [
    add,
    substract,
    multiply,
    divide,
    DuckDuckGoSearchTool,
    TavilySearchTool,
    combined_web_search,
    WikipediaSearchTool,
    ArxivSearchTool,
    PubmedSearchTool,
    save_and_read_file,
    download_file_from_url,
    extract_text_from_image,
    analyze_csv_file,
    analyze_excel_file,
    extract_video_id,
    get_youtube_transcript,
]

# Model handle with the tool schemas bound, used by the assistant node.
chat_with_tools = chat.bind_tools(tools)
def simple_graph():
    """Build and compile the agent graph.

    Flow: START -> retriever (injects a similar Q/A example from the vector
    store) -> assistant (tool-bound LLM) -> tools (when the model requests a
    call) -> assistant, looping until the model answers without tool calls,
    then END.

    Returns:
        The compiled LangGraph graph, ready to be invoked with
        ``{"messages": [...]}``.
    """
    def assistant(state: MessagesState):
        """LLM node: prepend the system prompt and call the tool-bound model."""
        return {"messages": [chat_with_tools.invoke([sys_msg] + state["messages"])]}

    def retriever(state: MessagesState):
        """Retrieval node: fetch the most similar stored question and append
        it to the conversation as an in-context example.

        Uses the first message's content as the query.  Assumes the vector
        store returns at least one hit (raises IndexError otherwise —
        TODO confirm the store is never empty in deployment).
        """
        similar_question = vector_store.similarity_search(state["messages"][0].content)
        example_msg = HumanMessage(
            content=f"Here I provide a similar question and answer for reference: \n\n{similar_question[0].page_content}",
        )
        # Fix: do NOT add sys_msg to the state here.  The assistant node
        # already prepends sys_msg on every model call; storing it in state
        # as well sent the system prompt to the model twice per turn.
        # The add_messages reducer appends example_msg to the existing state.
        return {"messages": [example_msg]}

    # Build graph / nodes
    builder = StateGraph(MessagesState)
    builder.add_node("retriever", retriever)
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(tools))
    # Logic / edges
    builder.add_edge(START, "retriever")
    builder.add_edge("retriever", "assistant")
    # tools_condition routes to "tools" when the last AI message carries
    # tool calls, otherwise to END.
    builder.add_conditional_edges(
        "assistant",
        tools_condition,
    )
    builder.add_edge("tools", "assistant")
    # Fix: the original line ended with a stray " |" (web-scrape artifact),
    # which made this module a SyntaxError.
    return builder.compile()