# Provenance: uploaded to Hugging Face by saibsund ("Upload 4 files", commit 8d25d65, verified).
import streamlit as st
import os
from utils import load_index_and_meta, retrieve_top_k
from langchain_groq import ChatGroq
# ---------------------------
# Paths to the prebuilt retrieval artifacts
# ---------------------------
META_PATH = "resources/flyk_chunks_meta.jsonl"
CHUNKS_PATH = "resources/flyk_chunks_clean.jsonl"
INDEX_PATH = "resources/flyk_faiss_clean.index"


# ---------------------------
# Cached resource loaders.
# Streamlit re-executes this whole script on every user interaction, so
# heavyweight objects (the Groq client, the FAISS index, the embedding
# model) must be cached with st.cache_resource or they are rebuilt on
# every rerun.
# ---------------------------
@st.cache_resource
def _load_llm():
    """Create the Groq chat model once per server process.

    temperature=0 keeps answers deterministic so responses stay grounded
    in the retrieved policy text.
    """
    return ChatGroq(model="llama-3.3-70b-versatile", temperature=0)


@st.cache_resource
def _load_retrieval_assets():
    """Load chunk metadata, id mapping, FAISS index, and embedding model once.

    Returns the 4-tuple produced by utils.load_index_and_meta:
    (meta_list, mapping, index, embed_model).
    """
    return load_index_and_meta(META_PATH, CHUNKS_PATH, INDEX_PATH)


# Module-level names preserved for the UI code below.
llm = _load_llm()
meta_list, mapping, index, embed_model = _load_retrieval_assets()
# ---------------------------
# Streamlit UI
# ---------------------------
st.set_page_config(page_title="Flykite HR Policy Assistant", layout="wide")
st.title("🛫 Flykite Airlines — HR Policy Assistant (RAG)")
st.write("Ask any HR policy question. Responses are grounded in the official HR Policy Handbook.")

question = st.text_input("Enter your question:")

if question:
    # Retrieve the top-k handbook chunks whose similarity score clears
    # the min_score threshold.
    with st.spinner("Retrieving information..."):
        retrieved = retrieve_top_k(
            query=question,
            top_k=5,
            min_score=0.25,
            index=index,
            embed_model=embed_model,
            meta_list=meta_list,
            mapping=mapping,
        )

    if not retrieved:
        # Nothing cleared the score threshold — don't invoke the LLM with
        # an empty context (it would have nothing grounded to answer from).
        st.warning("No relevant policy text was found for this question. Try rephrasing it.")
    else:
        # Build the grounding context; each chunk is tagged with its page
        # and chunk id so the model can produce citations.
        context = "".join(
            f"(Page {r['page']} • Chunk {r['chunk_id']}):\n{r['text']}\n\n"
            for r in retrieved
        )

        prompt = f"""
You are an HR expert assistant for Flykite Airlines.
Use ONLY the context below to answer the question.
Question: {question}
Context:
{context}
Provide answer in:
1. Summary
2. Steps (if applicable)
3. Citations (page + chunk)
"""

        with st.spinner("Generating grounded answer..."):
            # .content extracts the plain-text answer from the AIMessage.
            response = llm.invoke(prompt).content

        st.subheader("📘 Answer")
        st.write(response)

        # Show the raw retrieved chunks so users can audit the grounding.
        with st.expander("📂 Retrieved Policy Context"):
            for r in retrieved:
                st.markdown(
                    f"**Page {r['page']} | Chunk {r['chunk_id']} | Score {r['score']:.3f}**"
                )
                st.write(r["text"])
                st.markdown("---")