# kizfestchat / app.py
# Author: Bur3hani (Hugging Face Space; commit 2cee6d0, verified)
import streamlit as st
# st.set_page_config must be the first Streamlit command executed in the
# script, which is why it runs before the remaining imports.
st.set_page_config(page_title="KizDar Festival AI", layout="centered")
# NOTE(review): `langchain.vectorstores` / `langchain.embeddings` are legacy
# import paths; newer releases expose these via `langchain_community` — confirm
# against the pinned langchain version before migrating.
from langchain.vectorstores import Chroma
from langchain.embeddings import HuggingFaceEmbeddings
from transformers import pipeline
import datetime, os, requests, zipfile
# ----------------------------
# Setup
# ----------------------------
DB_URL = "https://huggingface.co/Bur3hani/kizdarFestival_Assistant/resolve/main/chroma_db.zip"
DB_DIR = "chroma_db"

# Download and extract the prebuilt Chroma vector store on first run only.
if not os.path.exists(DB_DIR):
    with st.spinner("🔄 Downloading knowledge base..."):
        # Stream the archive to disk so it is not held fully in memory, and
        # fail loudly on HTTP errors instead of silently unzipping an error
        # page as the knowledge base.
        resp = requests.get(DB_URL, stream=True, timeout=60)
        resp.raise_for_status()
        with open("chroma_db.zip", "wb") as f:
            for chunk in resp.iter_content(chunk_size=1 << 20):
                f.write(chunk)
        with zipfile.ZipFile("chroma_db.zip", "r") as zip_ref:
            zip_ref.extractall(DB_DIR)

# The embedding model must match the one used when the persisted DB was built.
embedding = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
db = Chroma(persist_directory=DB_DIR, embedding_function=embedding)
# Small instruction-tuned model used to generate the final answers.
qa_pipeline = pipeline("text2text-generation", model="google/flan-t5-base")
# ----------------------------
# Streamlit Config
# ----------------------------
# Lazily create the per-session containers used by the UI below.
for _key in ("history", "log"):
    if _key not in st.session_state:
        st.session_state[_key] = []
# ----------------------------
# UI Header
# ----------------------------
# placehold.it has been retired and no longer serves images; placehold.co is
# the maintained equivalent placeholder service.
st.image("https://placehold.co/150x150", width=150)
st.title("KizDar Festival AI Assistant")
st.caption("Ask about the schedule, DJs, dress code, venues, volunteering, and more.")

# ----------------------------
# Question Box
# ----------------------------
question = st.text_input("🎤 Ask your question:")
# ----------------------------
# Get Response Logic
# ----------------------------
def get_response(question):
    """Answer *question* via retrieval-augmented generation.

    Retrieves the 4 chunks most similar to the question from the Chroma
    store, stuffs them into a prompt, and lets the FLAN-T5 pipeline
    generate the answer.

    Returns the generated answer as a stripped string.
    """
    docs = db.similarity_search(question, k=4)
    if not docs:
        # Nothing retrieved: don't prompt the model with an empty context.
        return "Sorry, I couldn't find anything about that in the festival guide."
    context = "\n".join(doc.page_content for doc in docs)
    prompt = f"Answer this question clearly and fully:\nQuestion: {question}\nContext: {context}"
    # Deterministic decoding keeps answers reproducible for identical queries.
    result = qa_pipeline(prompt, max_length=512, do_sample=False)
    return result[0]["generated_text"].strip()
if question:
    with st.spinner("🤖 Generating answer..."):
        answer = get_response(question)
    timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    st.session_state.history.append((question, answer))
    # Log only an 80-char preview of the answer to keep entries compact.
    st.session_state.log.append(f"{timestamp} | Q: {question} | A: {answer[:80]}...")
    st.markdown("### ✅ Answer:")
    st.success(answer)
# ----------------------------
# Previous Questions
# ----------------------------
if st.session_state.history:
    st.markdown("---")
    st.markdown("### 🕓 Recent Q&A:")
    # Show the last five exchanges, newest first.
    for i, (q, a) in enumerate(reversed(st.session_state.history[-5:]), 1):
        st.markdown(f"**{i}. {q}**")
        st.markdown(f"*{a}*")
# ----------------------------
# Log View
# ----------------------------
with st.expander("📜 View Usage Log"):
    # Only the ten most recent entries, to keep the expander compact.
    for entry in st.session_state.log[-10:]:
        st.code(entry)