# app_notes_ai.py (Notes.AI)
import os
import json
from datetime import datetime

import streamlit as st
from langchain_chroma import Chroma
from langchain_groq import ChatGroq
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain

# Reuse the embedding model that built the index, so queries are embedded
# the same way the documents were
from vectorize_documents import embeddings

working_dir = os.path.dirname(os.path.abspath(__file__))
with open(f"{working_dir}/config.json") as f:
    config_data = json.load(f)
GROQ_API_KEY = config_data["GROQ_API_KEY"]
os.environ["GROQ_API_KEY"] = GROQ_API_KEY
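# config.json is expected to hold just the key read above, e.g.:
# {
#     "GROQ_API_KEY": "gsk_..."
# }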

# Ensure the chat-history JSON file exists before first use
chat_history_file = "chat_histories.json"
if not os.path.exists(chat_history_file):
    with open(chat_history_file, "w") as f:
        json.dump({}, f)


# Helpers for loading and saving per-user chat history
def load_chat_history():
    with open(chat_history_file, "r") as f:
        return json.load(f)


def save_chat_history(chat_histories):
    with open(chat_history_file, "w") as f:
        json.dump(chat_histories, f, indent=4)
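

# chat_histories.json maps each username to a list of timestamped exchanges
# (written by the save logic at the bottom of this file), e.g.:
# {
#     "Alice": [
#         {"timestamp": "2025-01-01 12:00:00", "user": "...", "assistant": "..."}
#     ]
# }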


# Set up the Chroma vector store, pointing at the directory persisted by the
# vectorizer; the absolute path keeps this working regardless of the cwd
def setup_vectorstore():
    vectorstore = Chroma(
        persist_directory=f"{working_dir}/vector_db_dir_notes_ai",
        embedding_function=embeddings,
    )
    return vectorstore
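

# A quick sanity check for the store (a sketch, not executed by the app; it
# assumes the persist directory was already populated by vectorize_documents):
# vs = setup_vectorstore()
# for doc in vs.similarity_search("operating systems", k=2):
#     print(doc.page_content[:80])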


# Build the conversational RAG chain: Groq LLM + Chroma retriever + buffer memory
def chat_chain(vectorstore):
    llm = ChatGroq(
        model="llama-3.1-70b-versatile",
        temperature=0,
    )
    retriever = vectorstore.as_retriever()
    memory = ConversationBufferMemory(
        llm=llm,
        output_key="answer",
        memory_key="chat_history",
        return_messages=True,
    )
    chain = ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=retriever,
        chain_type="stuff",
        memory=memory,
        verbose=True,
        return_source_documents=True,
    )
    return chain
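

# How a query flows through the chain: ConversationalRetrievalChain condenses the
# new question plus the buffered chat_history into a standalone question, fetches
# matching chunks from Chroma, and "stuff"s them into one prompt for the LLM.
# A round trip outside Streamlit would look roughly like:
#     chain = chat_chain(setup_vectorstore())
#     result = chain({"question": "Explain paging in operating systems."})
#     result["answer"]             # the model's reply
#     result["source_documents"]   # present because return_source_documents=True
# Heads-up: Groq retires model IDs over time; if "llama-3.1-70b-versatile" is no
# longer served, swap in a currently available model name.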


# Streamlit UI
st.set_page_config(
    page_title="Notes.AI",
    page_icon="🤖",  # page_icon expects a single emoji (or an image path/URL)
    layout="centered",
)
st.title("🤖 Notes.AI")
st.subheader("Hey! Search the CSE 7th Sem notes here, and read PYQ answers too!")

# Step 1: Ask for the user's name
if "username" not in st.session_state:
    username = st.text_input("Enter your name to proceed:")
    if username:
        with st.spinner("Loading chatbot interface... Please wait."):
            st.session_state.username = username
            st.session_state.chat_history = []  # Start with an empty chat history
            st.session_state.vectorstore = setup_vectorstore()
            st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
        st.success(f"Welcome, {username}! The chatbot interface is ready.")
else:
    username = st.session_state.username

# Step 2: Initialize components if a rerun arrives without them
if "conversational_chain" not in st.session_state:
    st.session_state.vectorstore = setup_vectorstore()
    st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
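
# Note: Streamlit re-runs this entire script on every interaction; the guards
# above keep the vector store and chain cached in st.session_state instead of
# rebuilding them on each rerun.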

# Step 3: Chatbot interface
if "username" in st.session_state:
    st.subheader(f"Hello {username}, start your query below!")

    # Replay the existing chat history
    for message in st.session_state.chat_history:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # User input section
    user_input = st.chat_input("Ask AI...")
    if user_input:
        with st.spinner("Processing your query... Please wait."):
            # Save and display the user's message
            st.session_state.chat_history.append({"role": "user", "content": user_input})
            with st.chat_message("user"):
                st.markdown(user_input)

            # Get and display the assistant's response
            with st.chat_message("assistant"):
                response = st.session_state.conversational_chain({"question": user_input})
                assistant_response = response["answer"]
                st.markdown(assistant_response)

            # Save the assistant's response to session state
            st.session_state.chat_history.append({"role": "assistant", "content": assistant_response})

            # Append the exchange to chat_histories.json with a timestamp
            chat_histories = load_chat_history()
            timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            chat_histories.setdefault(username, []).append({
                "timestamp": timestamp,
                "user": user_input,
                "assistant": assistant_response,
            })
            save_chat_history(chat_histories)
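

# Example: reading a user's saved history back from disk (a sketch, not part of
# the UI; the name "Alice" is illustrative):
# for entry in load_chat_history().get("Alice", []):
#     print(entry["timestamp"], entry["user"], "->", entry["assistant"][:60])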