# app.py — UoBs HoDs Insight AI Assistant (Gradio RAG app)
# =========================================
# 1️⃣ Install imports
# =========================================
import os
import pickle
import faiss
import numpy as np
import gradio as gr
from sentence_transformers import SentenceTransformer
from groq import Groq
# =========================================
# 2️⃣ Load Groq API from HF Secrets
# =========================================
# Authenticate the Groq client from the GROQ_API_KEY environment variable
# (set as a Hugging Face Spaces secret). If the secret is missing,
# api_key is None and API calls will fail at request time.
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
GROQ_MODEL = "llama-3.3-70b-versatile"  # chat model used for all completions
# =========================================
# 3️⃣ Load FAISS + Chunks
# =========================================
# Pre-built FAISS index and the pickled list of text chunks it was built
# from; both files must ship alongside this app. all_chunks[i] is the
# text for index vector i.
index = faiss.read_index("faiss_index.bin")
with open("chunks.pkl", "rb") as f:
    all_chunks = pickle.load(f)
# Load embedding model (only for query embedding)
# NOTE(review): must be the same model the index was built with — confirm.
embedding_model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
# =========================================
# 4️⃣ Groq Query Function
# =========================================
def groq_query(prompt):
    """Send *prompt* to the Groq chat API and return the assistant's reply text."""
    messages = [
        {"role": "system", "content": "You are a strict university assistant. Answer ONLY using provided context."},
        {"role": "user", "content": prompt},
    ]
    # temperature=0 keeps answers deterministic and strictly grounded.
    response = client.chat.completions.create(
        messages=messages,
        model=GROQ_MODEL,
        temperature=0,
    )
    return response.choices[0].message.content
# =========================================
# 5️⃣ RAG Function
# =========================================
def rag_answer(query, k=3):
    """Answer *query* with retrieval-augmented generation.

    Embeds the query, retrieves the top-*k* most similar chunks from the
    FAISS index, and asks the Groq model to answer strictly from them.

    Parameters
    ----------
    query : str
        The user's question.
    k : int, optional
        Number of chunks to retrieve (default 3).

    Returns
    -------
    str
        The model's answer, or a fixed message for blank input.
    """
    if not query.strip():
        return "Please enter a valid question."
    # L2-normalise the query embedding so the search behaves like cosine
    # similarity (assumes the index vectors were normalised the same way).
    query_embedding = embedding_model.encode([query])
    query_embedding = query_embedding / np.linalg.norm(query_embedding, axis=1, keepdims=True)
    query_embedding = query_embedding.astype("float32")
    distances, indices = index.search(query_embedding, k)
    # FAISS pads `indices` with -1 when the index holds fewer than k
    # vectors; without the filter, all_chunks[-1] would silently pull in
    # the last chunk as a bogus "match".
    retrieved_texts = [all_chunks[i] for i in indices[0] if i >= 0]
    context = "\n\n".join(retrieved_texts)
    prompt = f"""
Use ONLY information below to answer.
If no answer is found, respond:
"The information is not available in the provided documents.
Please check the official University of Baltistan website for more information: https://uobs.edu.pk/"
Context:
{context}
Question:
{query}
"""
    return groq_query(prompt)
# =========================================
# Gradio UI
# =========================================
# Sample mock questions
# Clickable example questions shown in the left sidebar; clicking one
# copies its text into the input box (see button bindings below).
mock_questions = [
    "What is the focus of the Botany Department?",
    "Who is the Head of Chemistry Department?",
    "What programs does the Computer Science Department offer?",
    "What environmental challenges does the Earth & Environmental Science Dept address?",
    "Who is the Head of Mathematics Department?",
    "What is the goal of Educational Development Department?",
    "Who leads the Languages and Cultural Studies Department?"
]
# Function to set mock question into input
def set_question(q_text):
    """Identity passthrough: return the clicked sample question unchanged
    so Gradio can place it into the input textbox."""
    return q_text
# Function to respond via RAG
def respond(message, history):
    """Chat callback: append the user's message and the RAG answer to the
    history, returning (updated history, "") — the empty string clears the
    input textbox."""
    history = history or []
    answer = rag_answer(message)
    history.append({"role": "user", "content": message})
    history.append({"role": "assistant", "content": answer})
    return history, ""
# =========================================
# Gradio UI
# =========================================
with gr.Blocks() as demo:
    # App title
    gr.Markdown(
        "<h1 style='text-align:center; color:#2c3e50;'>🎓 UoBs HoDs Insight AI Assistant</h1>"
    )
    # Description
    gr.Markdown(
        """
<div style="text-align:center; color:#34495e; font-size:16px; max-width:900px; margin:auto;">
Welcome to the <b>UoBs HoDs Insight AI Assistant</b> – your trusted AI-powered guide to the University of Baltistan.
This intelligent chatbot delivers accurate and verified information directly from the official messages of all department heads.
Ask about academic programs, faculty, research initiatives, or departmental objectives, and receive <b>instant, reliable answers</b> strictly sourced from official university communications.
For additional details or to explore further, all information is anchored to the <a href="https://uobs.edu.pk/" target="_blank">official University of Baltistan website</a>.
</div>
"""
    )
    # Main layout: Left sidebar + Right chatbot
    with gr.Row():
        # Left column: one button per sample question
        with gr.Column(scale=1):
            gr.Markdown("### 💡 Sample Questions")
            mock_buttons = []
            for q in mock_questions:
                btn = gr.Button(q, elem_classes="mock-btn")
                mock_buttons.append(btn)
        # Right column: chatbot + input row
        with gr.Column(scale=3):
            # type="messages" matches the {"role", "content"} dicts that
            # respond() produces; the legacy default ("tuples") format
            # would render the history incorrectly.
            chatbot = gr.Chatbot(type="messages")
            # Input row: 80% textbox + 20% send button
            with gr.Row():
                user_input = gr.Textbox(
                    placeholder="Type your question here...",
                    lines=1,
                    scale=8  # 80% width
                )
                send_btn = gr.Button(
                    "Send",
                    variant="primary",
                    elem_id="send-btn",
                    scale=2  # 20% width
                )
    # Clicking Send runs the RAG pipeline and clears the textbox.
    send_btn.click(
        respond,
        inputs=[user_input, chatbot],
        outputs=[chatbot, user_input]
    )
    # Pressing Enter in the textbox behaves the same as clicking Send.
    user_input.submit(
        respond,
        inputs=[user_input, chatbot],
        outputs=[chatbot, user_input]
    )
    # Each sample-question button copies its text into the input field.
    # Default binding (q=q) avoids the late-binding closure pitfall.
    for btn, q in zip(mock_buttons, mock_questions):
        btn.click(
            lambda q=q: q,
            outputs=user_input
        )

# Launch the app
demo.launch()