The Supervisor is working
Browse files- README.md +1 -1
- agents/__init__.py +0 -0
- agents/api_agent.py +32 -0
- agents/retriever_agent.py +33 -0
- agents/scraping_agent.py +52 -0
- data_ingestion/__init__.py +0 -0
- data_ingestion/get_data.py +35 -0
- faiss_index/index.faiss +0 -0
- faiss_index/index.pkl +0 -0
- orchestrator/supervisor.py +42 -0
- requirements.txt +12 -0
README.md
CHANGED
|
@@ -1 +1 @@
|
|
| 1 |
-
#
|
|
|
|
| 1 |
+
# Multi-Source Multi-Agent Finance Assistant
|
agents/__init__.py
ADDED
|
File without changes
|
agents/api_agent.py
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from langchain_community.tools.yahoo_finance_news import YahooFinanceNewsTool
|
| 2 |
+
from langgraph.prebuilt import create_react_agent
|
| 3 |
+
from langchain_google_genai import ChatGoogleGenerativeAI
|
| 4 |
+
from dotenv import load_dotenv
|
| 5 |
+
import os
|
| 6 |
+
import google.generativeai as genai
|
| 7 |
+
|
| 8 |
+
load_dotenv()
|
| 9 |
+
|
| 10 |
+
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
|
| 11 |
+
|
| 12 |
+
def get_api_agent():
    """Build the market-data agent: a ReAct agent wired to Yahoo Finance news.

    Returns a (not yet compiled-into-a-supervisor) langgraph ReAct agent whose
    only tool is YahooFinanceNewsTool. The agent's name, "Financial_agent",
    is how the supervisor addresses it during handoffs.
    """
    llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash")
    # Instructions shown to the LLM; kept verbatim — they are runtime behavior.
    instructions = (
        "You are a Financial agent.\n\n"
        "INSTRUCTIONS:\n"
        "- You polls real-time & historical market data.\n"
        "- After you're done with your tasks, respond to the supervisor directly\n"
        "- Respond ONLY with the results of your work, do NOT include ANY other text.\n"
        "- You can use the tools provided to you to get the data."
    )
    return create_react_agent(
        model=llm,
        tools=[YahooFinanceNewsTool()],
        prompt=instructions,
        name="Financial_agent",
    )
|
| 26 |
+
|
| 27 |
+
# api_agent = get_api_agent()
|
| 28 |
+
|
| 29 |
+
# result = api_agent.invoke({"messages": ["Latest news about Apple?"]})
|
| 30 |
+
|
| 31 |
+
# for i in result["messages"]:
|
| 32 |
+
# i.pretty_print()
|
agents/retriever_agent.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from langgraph.prebuilt import create_react_agent
|
| 2 |
+
from langchain_google_genai import ChatGoogleGenerativeAI
|
| 3 |
+
from langchain.tools.retriever import create_retriever_tool
|
| 4 |
+
from data_ingestion.get_data import get_vector_store
|
| 5 |
+
from dotenv import load_dotenv
|
| 6 |
+
import google.generativeai as genai
|
| 7 |
+
import os
|
| 8 |
+
|
| 9 |
+
load_dotenv()
|
| 10 |
+
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def get_retriever_agent():
    """Build a ReAct agent that answers questions from the local FAISS store.

    Loads (or lazily creates) the persisted vector store via get_vector_store()
    and exposes it to the LLM as a single retriever tool.
    """
    store = get_vector_store()
    # NOTE(review): tool name/description appear copied from a tutorial
    # ("Lilian Weng blog posts") — confirm they match this project's corpus.
    # Kept verbatim because the description is sent to the LLM at runtime.
    retriever_tool = create_retriever_tool(
        store.as_retriever(),
        "retrieve_blog_posts",
        "Search and return information about Lilian Weng blog posts.",
    )
    instructions = (
        "You are a retriever agent.\n\n"
        "INSTRUCTIONS:\n"
        "- Get the data from the vector store.\n"
        "- After you're done with your tasks, respond to the supervisor directly\n"
        "- Respond ONLY with the results of your work, do NOT include ANY other text."
    )
    return create_react_agent(
        model=ChatGoogleGenerativeAI(model="gemini-2.0-flash"),
        tools=[retriever_tool],
        prompt=instructions,
        name="retriever_agent",
    )
|
| 27 |
+
|
| 28 |
+
# Manual smoke test. Guarded so that importing this module (as
# orchestrator/supervisor.py does) no longer triggers an expensive
# LLM + vector-store invocation at import time — the sibling agent
# modules keep their smoke tests disabled for the same reason.
if __name__ == "__main__":
    retriever_agent = get_retriever_agent()
    result = retriever_agent.invoke({"messages": ["Latest news about Apple?"]})
    for i in result["messages"]:
        i.pretty_print()
|
agents/scraping_agent.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from langgraph.prebuilt import create_react_agent
|
| 2 |
+
from langchain_google_genai import ChatGoogleGenerativeAI
|
| 3 |
+
from langchain_core.tools import tool
|
| 4 |
+
from langchain_community.document_loaders import WebBaseLoader, PyPDFLoader
|
| 5 |
+
from langchain_community.document_loaders.csv_loader import CSVLoader
|
| 6 |
+
from dotenv import load_dotenv
|
| 7 |
+
import google.generativeai as genai
|
| 8 |
+
import os
|
| 9 |
+
|
| 10 |
+
load_dotenv()
|
| 11 |
+
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
|
| 12 |
+
|
| 13 |
+
@tool
def web_loader(url: str) -> str:
    """Provides the docs of the web of the url provided."""
    # Join instead of quadratic `s += ...` in a loop; output is identical:
    # each document's stripped text is prefixed with a newline.
    docs = WebBaseLoader(url).load()
    return "".join("\n" + d.page_content.strip() for d in docs)
|
| 21 |
+
|
| 22 |
+
@tool
def pdf_loader(file_path: str) -> str:
    """Provides the pdf docs of the file_path provided."""
    # Join instead of quadratic `s += ...` in a loop; output is identical:
    # each page's stripped text is prefixed with a newline.
    docs = PyPDFLoader(file_path).load()
    return "".join("\n" + d.page_content.strip() for d in docs)
|
| 30 |
+
|
| 31 |
+
@tool
def csv_loader(file_path: str) -> str:
    """Provides the csv docs of the file_path provided."""
    # Join instead of quadratic `s += ...` in a loop; output is identical:
    # each row-document's stripped text is prefixed with a newline.
    docs = CSVLoader(file_path).load()
    return "".join("\n" + d.page_content.strip() for d in docs)
|
| 39 |
+
|
| 40 |
+
def get_scraping_agent():
    """Build the scraping agent: a ReAct agent with web/PDF/CSV loader tools.

    The agent's name, "scraping_agent", is how the supervisor addresses it
    during handoffs.
    """
    loader_tools = [web_loader, pdf_loader, csv_loader]
    # Instructions shown to the LLM; kept verbatim — they are runtime behavior.
    instructions = (
        "You are a scraping agent.\n\n"
        "INSTRUCTIONS:\n"
        "- Get the data from the web, pdf, csv, etc.\n"
        "- After you're done with your tasks, respond to the supervisor directly\n"
        "- Respond ONLY with the results of your work, do NOT include ANY other text."
    )
    return create_react_agent(
        model=ChatGoogleGenerativeAI(model="gemini-2.0-flash"),
        tools=loader_tools,
        prompt=instructions,
        name="scraping_agent",
    )
|
data_ingestion/__init__.py
ADDED
|
File without changes
|
data_ingestion/get_data.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from langchain_community.document_loaders import WebBaseLoader
|
| 2 |
+
from langchain_text_splitters import RecursiveCharacterTextSplitter
|
| 3 |
+
from langchain_community.vectorstores import FAISS
|
| 4 |
+
from pypdf import PdfReader
|
| 5 |
+
from langchain_google_genai import GoogleGenerativeAIEmbeddings
|
| 6 |
+
import os
|
| 7 |
+
|
| 8 |
+
def get_pdf_text(pdf):
    """Return the concatenated text of every page of *pdf*.

    pdf: a path or file-like object accepted by pypdf.PdfReader.
    """
    pdf_reader = PdfReader(pdf)
    # extract_text() may yield None/empty for pages with no extractable text
    # (e.g. scanned images); coalesce to "" so concatenation never raises
    # TypeError. join avoids the quadratic `text +=` build.
    return "".join(page.extract_text() or "" for page in pdf_reader.pages)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def get_text_chunks(text):
    """Split *text* into overlapping chunks sized for embedding."""
    # 100-char chunks with 50-char overlap — 50% overlap is generous, but
    # kept as-is to match the layout of the already-persisted index.
    splitter = RecursiveCharacterTextSplitter(chunk_size=100, chunk_overlap=50)
    return splitter.split_text(text)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def create_vector_store(text: str = "Hello world!"):
    """Chunk and embed *text*, persist the FAISS index to ./faiss_index, return it.

    The default text produces a minimal placeholder index so downstream code
    has something to load on first run.
    """
    embedder = GoogleGenerativeAIEmbeddings(model="models/gemini-embedding-exp-03-07")
    store = FAISS.from_texts(get_text_chunks(text), embedding=embedder)
    store.save_local("faiss_index")
    return store
|
| 29 |
+
|
| 30 |
+
def get_vector_store():
    """Load the persisted FAISS index from ./faiss_index.

    If no index exists on disk yet, build one first (seeded with
    create_vector_store's default sample text) and return it.
    """
    embeddings = GoogleGenerativeAIEmbeddings(model = "models/gemini-embedding-exp-03-07")
    # First run: nothing persisted yet, so build (and save) a fresh index.
    if not os.path.exists("faiss_index"):
        return create_vector_store()
    # SECURITY: allow_dangerous_deserialization unpickles index.pkl. This is
    # acceptable only because the index is produced locally by this project —
    # never point this at a faiss_index directory from an untrusted source.
    vectorstore = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
    return vectorstore
|
faiss_index/index.faiss
ADDED
|
Binary file (12.3 kB). View file
|
|
|
faiss_index/index.pkl
ADDED
|
Binary file (349 Bytes). View file
|
|
|
orchestrator/supervisor.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Make the repository root importable so `agents.*` and `data_ingestion.*`
# resolve when this file is run directly (python orchestrator/supervisor.py)
# rather than as part of an installed package.
import sys, os
top_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(top_dir)
|
| 4 |
+
|
| 5 |
+
from langgraph_supervisor import create_supervisor
|
| 6 |
+
from agents.retriever_agent import get_retriever_agent
|
| 7 |
+
from agents.scraping_agent import get_scraping_agent
|
| 8 |
+
from agents.api_agent import get_api_agent
|
| 9 |
+
from langchain_google_genai import ChatGoogleGenerativeAI
|
| 10 |
+
from dotenv import load_dotenv
|
| 11 |
+
import google.generativeai as genai
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
load_dotenv()
|
| 15 |
+
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def get_supervisor():
    """Build and compile the supervisor graph routing work between the
    scraping, retriever and finance (API) agents.

    Returns a compiled langgraph app; invoke it with
    {"messages": [...]} like any other graph.
    """
    retriever_agent = get_retriever_agent()
    scraping_agent = get_scraping_agent()
    api_agent = get_api_agent()
    return create_supervisor(
        model=ChatGoogleGenerativeAI(model="gemini-2.0-flash"),
        agents=[scraping_agent, retriever_agent, api_agent],
        # Fixed agent descriptions: the original prompt described the
        # retriever as fetching documents from a link/path (that is the
        # scraping agent's job), which would mis-route tasks; also fixed
        # the "documment"/"a api" typos.
        prompt=(
            "You are a supervisor managing three agents:\n"
            "- a scraping agent. Provide a URL or file path and it will fetch the document's content (web, PDF, CSV)\n"
            "- a retriever agent. Assign questions answerable from the local vector store to this agent\n"
            "- an API agent. Assign finance-related tasks to this agent\n"
            "Assign work to one agent at a time, do not call agents in parallel.\n"
            "Do not do any work yourself."
        ),
        add_handoff_back_messages=True,
        output_mode="full_history",
    ).compile()
|
| 36 |
+
|
| 37 |
+
# supervisor = get_supervisor()
|
| 38 |
+
|
| 39 |
+
# result = supervisor.invoke({"messages": ["Latest news about Apple?"]})
|
| 40 |
+
|
| 41 |
+
# for i in result["messages"]:
|
| 42 |
+
# i.pretty_print()
|
requirements.txt
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
langchain
|
| 2 |
+
langgraph
|
| 3 |
+
google-generativeai
|
| 4 |
+
google-ai-generativelanguage
|
| 5 |
+
yfinance
|
| 6 |
+
python-dotenv
|
| 7 |
+
langchain-community
|
| 8 |
+
langchain-google-genai
|
| 9 |
+
langgraph_supervisor
|
| 10 |
+
faiss-cpu
|
| 11 |
+
pypdf
|
| 12 |
+
streamlit
|