# kizfestchat/app/streamlit_app.py
# Provenance: Hugging Face Space by Bur3hani (commit eb888e3, "added files")
import streamlit as st
from langchain.vectorstores import Chroma
from langchain.embeddings import HuggingFaceEmbeddings
from transformers import pipeline
import datetime
# --- One-time resource setup -------------------------------------------------
# Streamlit re-executes this script top-to-bottom on every user interaction.
# Without caching, the embedding model, Chroma index, and FLAN-T5 pipeline
# would be reloaded on every rerun; cache_resource loads them once per process.

@st.cache_resource
def _load_vector_store():
    """Build the sentence-transformer embedder and open the persisted Chroma index."""
    emb = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
    return emb, Chroma(persist_directory="./chroma_db", embedding_function=emb)


@st.cache_resource
def _load_qa_pipeline():
    """Load the FLAN-T5 text2text-generation pipeline used to answer questions."""
    return pipeline("text2text-generation", model="google/flan-t5-base")


embedding, db = _load_vector_store()
qa_pipeline = _load_qa_pipeline()

# Per-session state: full (question, answer) history plus a timestamped,
# truncated usage log shown in the expander at the bottom of the page.
if "history" not in st.session_state:
    st.session_state.history = []
if "log" not in st.session_state:
    st.session_state.log = []
def get_response(question: str) -> str:
    """Answer *question* via retrieval-augmented generation.

    Retrieves the 4 most similar chunks from the Chroma index, packs them
    into a single prompt, and asks FLAN-T5 for a grounded answer.

    Returns the stripped generated text, or a fallback message when the
    index yields no context at all.
    """
    docs = db.similarity_search(question, k=4)
    context = "\n".join(doc.page_content for doc in docs)
    # Guard: with an empty/missing index the model would answer ungrounded.
    if not context.strip():
        return "Sorry, I couldn't find anything relevant in the festival info."
    prompt = (
        "Answer this question clearly and fully:\n"
        f"Question: {question}\nContext: {context}"
    )
    # do_sample=False -> deterministic greedy decoding.
    result = qa_pipeline(prompt, max_length=512, do_sample=False)
    return result[0]["generated_text"].strip()
# --- Page chrome & question handling -----------------------------------------
st.set_page_config(page_title="KizDar Festival Assistant", layout="centered")
st.title("KizDar Festival AI Assistant")
st.caption("Ask about the schedule, DJs, workshops, and more.")

question = st.text_input("Your question:")
# .strip() guard: ignore whitespace-only submissions instead of querying on them.
if question.strip():
    with st.spinner("Generating response..."):
        answer = get_response(question)

    # Record the exchange: full pair for session history, truncated line for the log.
    timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    st.session_state.history.append((question, answer))
    st.session_state.log.append(f"{timestamp} | Q: {question} | A: {answer[:80]}...")

    st.markdown("### Answer:")
    st.success(answer)
# --- Session history & usage log ---------------------------------------------
if st.session_state.history:
    st.markdown("---")
    st.markdown("### Previous Questions:")
    # Show up to the 5 most recent exchanges, newest first.
    for i, (q, a) in enumerate(reversed(st.session_state.history[-5:]), 1):
        st.markdown(f"**{i}. {q}**")
        st.markdown(f"*{a}*")

# Label fixed: original contained mojibake bytes where an emoji had been.
with st.expander("📝 View Usage Log"):
    # Only the 10 most recent log lines are displayed.
    for entry in st.session_state.log[-10:]:
        st.code(entry)