# =========================================
# 1️⃣ Install imports
# =========================================
import os
import pickle
import faiss
import numpy as np
import gradio as gr
from sentence_transformers import SentenceTransformer
from groq import Groq
# =========================================
# 2️⃣ Load Groq API from HF Secrets
# =========================================
# The key is injected via the Space's secret store; if GROQ_API_KEY is unset,
# os.environ.get returns None and the first API call will fail with an auth error.
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
# Model name passed on every chat-completion request below.
GROQ_MODEL = "llama-3.3-70b-versatile"
# =========================================
# 3️⃣ Load FAISS + Chunks
# =========================================
# Prebuilt FAISS index of the document-chunk embeddings, shipped with the Space.
index = faiss.read_index("faiss_index.bin")
# NOTE(review): pickle is only safe for trusted files — this one ships with the app.
# all_chunks[i] must line up with row i of the FAISS index.
with open("chunks.pkl", "rb") as f:
    all_chunks = pickle.load(f)
# Load embedding model (only for query embedding)
# Must be the same model that produced the indexed vectors, or search is meaningless.
embedding_model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
# =========================================
# 4️⃣ Groq Query Function
# =========================================
def groq_query(prompt):
    """Send *prompt* to the Groq chat API and return the model's reply text.

    A fixed system message pins the model to answering strictly from the
    supplied context; temperature 0 keeps responses deterministic.
    """
    system_msg = {
        "role": "system",
        "content": "You are a strict university assistant. Answer ONLY using provided context.",
    }
    user_msg = {"role": "user", "content": prompt}
    response = client.chat.completions.create(
        messages=[system_msg, user_msg],
        model=GROQ_MODEL,
        temperature=0,
    )
    return response.choices[0].message.content
# =========================================
# 5️⃣ RAG Function
# =========================================
def rag_answer(query, k=3):
    """Answer *query* using retrieval-augmented generation.

    Embeds the query, retrieves the top-*k* most similar chunks from the
    FAISS index, and asks the Groq LLM to answer strictly from that context.

    Args:
        query: The user's question (blank input is rejected up front).
        k: Number of chunks to retrieve (default 3).

    Returns:
        The model's answer string, or a validation message for empty input.
    """
    if not query.strip():
        return "Please enter a valid question."
    # L2-normalize the query embedding so inner-product search behaves like
    # cosine similarity (matching how the index was presumably built).
    query_embedding = embedding_model.encode([query])
    query_embedding = query_embedding / np.linalg.norm(query_embedding, axis=1, keepdims=True)
    query_embedding = query_embedding.astype("float32")
    distances, indices = index.search(query_embedding, k)
    # FAISS pads with -1 when fewer than k vectors exist; without this guard,
    # all_chunks[-1] would silently pull in the LAST chunk as a bogus match.
    retrieved_texts = [all_chunks[i] for i in indices[0] if 0 <= i < len(all_chunks)]
    context = "\n\n".join(retrieved_texts)
    prompt = f"""
Use ONLY information below to answer.
If no answer is found, respond:
"The information is not available in the provided documents.
Please check the official University of Baltistan website for more information: https://uobs.edu.pk/"
Context:
{context}
Question:
{query}
"""
    return groq_query(prompt)
# =========================================
# Gradio UI
# =========================================
# Sample mock questions
# Preset example questions shown as clickable buttons in the left sidebar;
# clicking one copies its text into the chat input box.
mock_questions = [
    "What is the focus of the Botany Department?",
    "Who is the Head of Chemistry Department?",
    "What programs does the Computer Science Department offer?",
    "What environmental challenges does the Earth & Environmental Science Dept address?",
    "Who is the Head of Mathematics Department?",
    "What is the goal of Educational Development Department?",
    "Who leads the Languages and Cultural Studies Department?"
]
# Function to set mock question into input
def set_question(q_text):
    """Return the clicked sample question unchanged, so it can fill the input box."""
    return q_text
# Function to respond via RAG
def respond(message, history):
    """Run the RAG pipeline on *message* and extend the chat *history*.

    Returns the updated history (openai-style role/content dicts) and an
    empty string so the input textbox is cleared after sending.
    """
    history = history if history else []
    reply = rag_answer(message)  # your RAG function
    history.extend([
        {"role": "user", "content": message},
        {"role": "assistant", "content": reply},
    ])
    # update chat and clear input
    return history, ""
with gr.Blocks() as demo:
    # App title
    gr.Markdown(
        "<h1 style='text-align:center; color:#2c3e50;'>🎓 UoBs HoDs Insight AI Assistant</h1>"
    )
    # Description
    gr.Markdown(
        """
        <div style="text-align:center; color:#34495e; font-size:16px; max-width:900px; margin:auto;">
        Welcome to the <b>UoBs HoDs Insight AI Assistant</b> – your trusted AI-powered guide to the University of Baltistan.
        This intelligent chatbot delivers accurate and verified information directly from the official messages of all department heads.
        Ask about academic programs, faculty, research initiatives, or departmental objectives, and receive <b>instant, reliable answers</b> strictly sourced from official university communications.
        For additional details or to explore further, all information is anchored to the <a href="https://uobs.edu.pk/" target="_blank">official University of Baltistan website</a>.
        </div>
        """
    )
    # Main layout: Left sidebar + Right chatbot
    with gr.Row():
        # Left column: mock questions
        with gr.Column(scale=1):
            gr.Markdown("### 💡 Sample Questions")
            # create buttons for each question
            mock_buttons = []
            for q in mock_questions:
                btn = gr.Button(q, elem_classes="mock-btn")
                mock_buttons.append(btn)
        # Right column: chatbot + input
        with gr.Column(scale=3):
            # type="messages" is required: respond() produces openai-style
            # {"role": ..., "content": ...} dicts, which the default (legacy
            # tuples) Chatbot format would reject or render incorrectly.
            chatbot = gr.Chatbot(type="messages")
            # Input row: 80% textbox + 20% send button
            with gr.Row():
                user_input = gr.Textbox(
                    placeholder="Type your question here...",
                    lines=1,
                    scale=8  # 80% width
                )
                send_btn = gr.Button(
                    "Send",
                    variant="primary",
                    elem_id="send-btn",
                    scale=2  # 20% width
                )
    # Bind send button to respond function
    send_btn.click(
        respond,
        inputs=[user_input, chatbot],
        outputs=[chatbot, user_input]
    )
    # Bind each mock question button to input field.
    # The q=q default binds each question at definition time, avoiding the
    # classic late-binding closure bug where every button would yield the last q.
    for btn, q in zip(mock_buttons, mock_questions):
        btn.click(
            lambda q=q: q,
            outputs=user_input
        )
# Launch the app
demo.launch()