# Notes-ai-2R / app.py
# (Hugging Face Space header residue preserved as comments: author Krish30,
#  commit "Update app.py", cb52177 verified)
import os
import json
import sqlite3
from datetime import datetime
import streamlit as st
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_chroma import Chroma
from langchain_groq import ChatGroq
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain
from vectorize_documents import embeddings
# Resolve paths relative to this file so the app works regardless of the
# current working directory.
working_dir = os.path.dirname(os.path.abspath(__file__))

# Load the Groq API key from config.json. A context manager closes the file
# handle promptly (the original `json.load(open(...))` never closed it) and
# the key is exported so langchain_groq can pick it up from the environment.
with open(os.path.join(working_dir, "config.json")) as config_file:
    config_data = json.load(config_file)
GROQ_API_KEY = config_data["GROQ_API_KEY"]
os.environ["GROQ_API_KEY"] = GROQ_API_KEY
# Open (and, if needed, initialize) the SQLite chat-history store.
def setup_db():
    """Open chat_history.db, ensure the chat_histories table exists, and
    return the live connection.

    check_same_thread=False allows the single cached connection to be
    reused across Streamlit reruns, which may occur on different threads.
    """
    connection = sqlite3.connect("chat_history.db", check_same_thread=False)
    connection.cursor().execute(
        """
        CREATE TABLE IF NOT EXISTS chat_histories (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            username TEXT,
            timestamp TEXT,
            day TEXT,
            user_message TEXT,
            assistant_response TEXT
        )
        """
    )
    connection.commit()
    return connection
# Persist one user/assistant exchange to SQLite.
def save_chat_history(conn, username, timestamp, day, user_message, assistant_response):
    """Insert a single chat exchange into chat_histories and commit."""
    record = (username, timestamp, day, user_message, assistant_response)
    conn.cursor().execute(
        "INSERT INTO chat_histories"
        " (username, timestamp, day, user_message, assistant_response)"
        " VALUES (?, ?, ?, ?, ?)",
        record,
    )
    conn.commit()
# Load the persisted Chroma vector store used for retrieval.
def setup_vectorstore():
    """Return a Chroma store backed by ./vector_db_2R using HF embeddings.

    NOTE(review): this constructs a fresh HuggingFaceEmbeddings() instead of
    reusing the `embeddings` imported from vectorize_documents — confirm both
    use the same model, or query-time and index-time embeddings may diverge.
    """
    embedding_fn = HuggingFaceEmbeddings()
    return Chroma(persist_directory="vector_db_2R", embedding_function=embedding_fn)
# Assemble the retrieval-augmented conversation chain.
def chat_chain(vectorstore):
    """Wire a Groq LLM, the vector-store retriever, and a conversation
    buffer into a ConversationalRetrievalChain and return it.

    The memory keys match what the chain expects: answers are stored under
    "answer" and the running transcript under "chat_history".
    """
    groq_llm = ChatGroq(model="llama-3.1-70b-versatile", temperature=0)
    buffer_memory = ConversationBufferMemory(
        llm=groq_llm,
        output_key="answer",
        memory_key="chat_history",
        return_messages=True,
    )
    return ConversationalRetrievalChain.from_llm(
        llm=groq_llm,
        retriever=vectorstore.as_retriever(),
        chain_type="stuff",
        memory=buffer_memory,
        verbose=True,
        return_source_documents=True,
    )
# ---------------------------------------------------------------------------
# Streamlit UI
# ---------------------------------------------------------------------------
st.set_page_config(page_title="Notes.AI", page_icon="🤖AI", layout="centered")
st.title("🤖 Notes.AI")
st.subheader("Hey! Here you can search for notes of CSE 3rd Sem! Read Notes, Read PYQ answers also!!")

# Step 1: open the SQLite connection once per session and ask for a name.
if "conn" not in st.session_state:
    st.session_state.conn = setup_db()

if "username" not in st.session_state:
    username = st.text_input("Enter your name to proceed:")
    if username:
        with st.spinner("Loading chatbot interface... Please wait."):
            st.session_state.username = username
            st.session_state.chat_history = []  # in-memory transcript for this session
            st.session_state.vectorstore = setup_vectorstore()
            st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
            st.success(f"Welcome, {username}! The chatbot interface is ready.")
else:
    username = st.session_state.username

# Step 2: lazily rebuild components that may be missing after a rerun.
if "chat_history" not in st.session_state:
    # BUGFIX: the original only created chat_history inside the login branch,
    # so reaching Step 3 via this recovery path raised AttributeError when the
    # transcript was read below.
    st.session_state.chat_history = []
if "conversational_chain" not in st.session_state:
    st.session_state.vectorstore = setup_vectorstore()
    st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)

# Step 3: render the transcript and handle new input.
if "username" in st.session_state:
    st.subheader(f"Hello {username}, start your query below!")

    # Replay the in-memory transcript so the conversation survives reruns.
    for message in st.session_state.chat_history:
        if message["role"] == "user":
            with st.chat_message("user"):
                st.markdown(message["content"])
        elif message["role"] == "assistant":
            with st.chat_message("assistant"):
                st.markdown(message["content"])

    user_input = st.chat_input("Ask AI....")
    if user_input:
        with st.spinner("Processing your query... Please wait."):
            # Record and echo the user's message.
            st.session_state.chat_history.append({"role": "user", "content": user_input})
            with st.chat_message("user"):
                st.markdown(user_input)

            # Run the retrieval chain and display the answer.
            with st.chat_message("assistant"):
                response = st.session_state.conversational_chain({"question": user_input})
                assistant_response = response["answer"]
                st.markdown(assistant_response)
            st.session_state.chat_history.append({"role": "assistant", "content": assistant_response})

            # Persist the exchange with a timestamp and weekday (e.g. Monday).
            now = datetime.now()
            timestamp = now.strftime("%Y-%m-%d %H:%M:%S")
            day = now.strftime("%A")
            save_chat_history(st.session_state.conn, username, timestamp, day, user_input, assistant_response)
# # Set up the database with check_same_thread=False
# def setup_db():
# conn = sqlite3.connect("chat_history.db", check_same_thread=False) # Ensure thread-safe connection
# cursor = conn.cursor()
# cursor.execute("""
# CREATE TABLE IF NOT EXISTS chat_histories (
# id INTEGER PRIMARY KEY AUTOINCREMENT,
# username TEXT,
# timestamp TEXT,
# day TEXT,
# user_message TEXT,
# assistant_response TEXT
# )
# """)
# conn.commit()
# return conn # Return the connection
# # Function to save chat history to SQLite
# def save_chat_history(conn, username, timestamp, day, user_message, assistant_response):
# cursor = conn.cursor()
# cursor.execute("""
# INSERT INTO chat_histories (username, timestamp, day, user_message, assistant_response)
# VALUES (?, ?, ?, ?, ?)
# """, (username, timestamp, day, user_message, assistant_response))
# conn.commit()
# # Function to load chat history from SQLite
# def load_chat_history(conn, username):
# cursor = conn.cursor()
# cursor.execute("""
# SELECT timestamp, day, user_message, assistant_response
# FROM chat_histories
# WHERE username = ?
# ORDER BY timestamp
# """, (username,))
# chat_history = cursor.fetchall()
# return chat_history
# # Function to set up vectorstore for embeddings
# def setup_vectorstore():
# embeddings = HuggingFaceEmbeddings()
# vectorstore = Chroma(persist_directory="vector_db_dir_notes_ai", embedding_function=embeddings)
# return vectorstore
# # Function to set up the chatbot chain
# def chat_chain(vectorstore):
# llm = ChatGroq(
# model="llama-3.1-70b-versatile",
# temperature=0
# )
# retriever = vectorstore.as_retriever()
# memory = ConversationBufferMemory(
# llm=llm,
# output_key="answer",
# memory_key="chat_history",
# return_messages=True
# )
# chain = ConversationalRetrievalChain.from_llm(
# llm=llm,
# retriever=retriever,
# chain_type="stuff",
# memory=memory,
# verbose=True,
# return_source_documents=True
# )
# return chain
# # Streamlit UI setup
# st.set_page_config(
# page_title="Notes.AI",
# page_icon="🤖AI",
# layout="centered"
# )
# st.title("🤖 Notes.AI")
# st.subheader("Hey! Here you can search for notes of CSE 7th Sem! Read Notes, Read PYQ answers also!!")
# # Step 1: Initialize the connection and check if the user is already logged in
# if "conn" not in st.session_state:
# st.session_state.conn = setup_db()
# if "username" not in st.session_state:
# username = st.text_input("Enter your name to proceed:")
# if username:
# with st.spinner("Loading chatbot interface... Please wait."):
# st.session_state.username = username
# st.session_state.chat_history = []
# st.session_state.vectorstore = setup_vectorstore()
# st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
# st.success(f"Welcome, {username}! The chatbot interface is ready.")
# else:
# username = st.session_state.username
# # Step 2: Initialize components if not already set
# if "conversational_chain" not in st.session_state:
# st.session_state.vectorstore = setup_vectorstore()
# st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
# # Step 3: Show chatbot interface
# if "username" in st.session_state:
# st.subheader(f"Hello {username}, start your query below!")
# user_input = st.chat_input("Ask AI....")
# if user_input:
# with st.spinner("Processing your query... Please wait."):
# # Save user input to chat history
# st.session_state.chat_history.append({"role": "user", "content": user_input})
# # Display user's message
# with st.chat_message("user"):
# st.markdown(user_input)
# # Get assistant's response
# with st.chat_message("assistant"):
# response = st.session_state.conversational_chain({"question": user_input})
# assistant_response = response["answer"]
# st.markdown(assistant_response)
# # Save response to chat history
# st.session_state.chat_history.append({"role": "assistant", "content": assistant_response})
# # Save chat history to SQLite database with timestamp
# timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
# day = datetime.now().strftime("%A") # Get the day of the week (e.g., Monday)
# save_chat_history(st.session_state.conn, username, timestamp, day, user_input, assistant_response)
# # Display chat history for the current user
# if "username" in st.session_state:
# st.subheader(f"Chat History for {username}:")
# chat_history = load_chat_history(st.session_state.conn, username)
# if chat_history:
# for entry in chat_history:
# timestamp, day, user_message, assistant_response = entry
# st.write(f"**{day} - {timestamp}:**")
# st.write(f"**User:** {user_message}")
# st.write(f"**Assistant:** {assistant_response}")
# else:
# st.write("No chat history available.")
# import os
# import json
# from datetime import datetime
# import streamlit as st
# from langchain_huggingface import HuggingFaceEmbeddings
# from langchain_chroma import Chroma
# from langchain_groq import ChatGroq
# from langchain.memory import ConversationBufferMemory
# from langchain.chains import ConversationalRetrievalChain
# # Ensure the JSON file exists
# chat_history_file = "chat_histories.json"
# if not os.path.exists(chat_history_file):
# with open(chat_history_file, "w") as f:
# json.dump({}, f)
# # Functions to handle chat history
# def load_chat_history():
# with open(chat_history_file, "r") as f:
# return json.load(f)
# def save_chat_history(chat_histories):
# with open(chat_history_file, "w") as f:
# json.dump(chat_histories, f, indent=4)
# # Function to set up vectorstore
# def setup_vectorstore():
# embeddings = HuggingFaceEmbeddings()
# vectorstore = Chroma(persist_directory="vector_db_dir_notes_ai",
# embedding_function=embeddings)
# return vectorstore
# # Function to set up chatbot chain
# def chat_chain(vectorstore):
# llm = ChatGroq(
# model="llama-3.1-70b-versatile",
# temperature=0
# )
# retriever = vectorstore.as_retriever()
# memory = ConversationBufferMemory(
# llm=llm,
# output_key="answer",
# memory_key="chat_history",
# return_messages=True
# )
# chain = ConversationalRetrievalChain.from_llm(
# llm=llm,
# retriever=retriever,
# chain_type="stuff",
# memory=memory,
# verbose=True,
# return_source_documents=True
# )
# return chain
# # Streamlit UI
# st.set_page_config(
# page_title="Notes.AI",
# page_icon="🤖AI",
# layout="centered"
# )
# st.title("🤖 Notes.AI")
# st.subheader("Hey! Here you can search for notes of CSE 7th Sem! Read Notes, Read PYQ answers also!!")
# # Step 1: Input user's name
# if "username" not in st.session_state:
# username = st.text_input("Enter your name to proceed:")
# if username:
# with st.spinner("Loading chatbot interface... Please wait."):
# st.session_state.username = username
# st.session_state.chat_history = [] # Initialize empty chat history
# st.session_state.vectorstore = setup_vectorstore()
# st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
# st.success(f"Welcome, {username}! The chatbot interface is ready.")
# else:
# username = st.session_state.username
# # Step 2: Initialize components if not already set
# if "conversational_chain" not in st.session_state:
# st.session_state.vectorstore = setup_vectorstore()
# st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
# # Step 3: Show chatbot interface
# if "username" in st.session_state:
# st.subheader(f"Hello {username}, start your query below!")
# # Display existing chat history dynamically
# for message in st.session_state.chat_history:
# if message["role"] == "user":
# with st.chat_message("user"):
# st.markdown(f"{message['day']}: {message['content']}")
# elif message["role"] == "assistant":
# with st.chat_message("assistant"):
# st.markdown(f"{message['day']}: {message['content']}")
# # User input section
# user_input = st.chat_input("Ask AI....")
# if user_input:
# with st.spinner("Processing your query... Please wait."):
# # Save user input to session state
# st.session_state.chat_history.append({"role": "user", "content": user_input})
# # Display user's message
# with st.chat_message("user"):
# st.markdown(user_input)
# # Get assistant's response
# with st.chat_message("assistant"):
# response = st.session_state.conversational_chain({"question": user_input})
# assistant_response = response["answer"]
# st.markdown(assistant_response)
# # Save assistant's response to session state
# st.session_state.chat_history.append({"role": "assistant", "content": assistant_response})
# # Save chat history to file with timestamp
# chat_histories = load_chat_history()
# timestamp = datetime.now()
# day = timestamp.strftime("%A") # Get the full weekday name (e.g., Monday)
# formatted_timestamp = timestamp.strftime("%Y-%m-%d %H:%M:%S")
# if username not in chat_histories:
# chat_histories[username] = []
# chat_histories[username].append({
# "timestamp": formatted_timestamp,
# "day": day,
# "user": user_input,
# "assistant": assistant_response
# })
# save_chat_history(chat_histories)
# import os
# import json
# import streamlit as st
# from langchain_huggingface import HuggingFaceEmbeddings
# from langchain_chroma import Chroma
# from langchain_groq import ChatGroq
# from langchain.memory import ConversationBufferMemory
# from langchain.chains import ConversationalRetrievalChain
# from vectorize_documents import embeddings
# working_dir = os.path.dirname(os.path.abspath(__file__))
# config_data = json.load(open(f"{working_dir}/config.json"))
# GROQ_API_KEY = config_data["GROQ_API_KEY"]
# os.environ["GROQ_API_KEY"]= GROQ_API_KEY
# def setup_vectorstore():
# persist_directory = f"{working_dir}/vector_db_dir_notes_ai"
# embeddings = HuggingFaceEmbeddings()
# vectorstore = Chroma(persist_directory=persist_directory,
# embedding_function=embeddings)
# return vectorstore
# def chat_chain(vectorstore):
# llm = ChatGroq(
# model = "llama-3.1-70b-versatile",
# temperature = 0
# )
# retriever = vectorstore.as_retriever()
# memory = ConversationBufferMemory(
# llm = llm,
# output_key = "answer",
# memory_key = "chat_history",
# return_messages = True
# )
# chain = ConversationalRetrievalChain.from_llm(
# llm=llm,
# retriever = retriever,
# chain_type = "stuff",
# memory = memory,
# verbose=True,
# return_source_documents= True
# )
# return chain
# st.set_page_config(
# page_title="Notes.AI",
# page_icon="🤖AI",
# layout="centered"
# )
# st.title("🤖 Notes.AI")
# # st.title("🤖 Hey! Here you can search for notes of CSE 7th Sem! Read Notes, Read PYQ answers also!!")
# st.subheader("Hey! Here you can search for notes of CSE 7th Sem! Read Notes, Read PYQ answers also!!")
# # Additional subheading
# st.subheader("Start your query below to get instant help!")
# if "chat_history" not in st.session_state:
# st.session_state.chat_history = []
# if "vectorstore" not in st.session_state:
# st.session_state.vectorstore = setup_vectorstore()
# if "conversational_chain" not in st.session_state:
# st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
# for message in st.session_state.chat_history:
# with st.chat_message(message["role"]):
# st.markdown(message["content"])
# user_input = st.chat_input("Ask AI....")
# if user_input:
# st.session_state.chat_history.append({"role":"user", "content":user_input})
# with st.chat_message("user"):
# st.markdown(user_input)
# with st.chat_message("assistant"):
# response = st.session_state.conversational_chain({"question":user_input})
# assistant_response = response["answer"]
# st.markdown(assistant_response)
# st.session_state.chat_history.append({"role":"assistant","content": assistant_response})