# (Hugging Face Spaces status banner removed — not part of the app source.)
| import json | |
| import requests | |
| import os | |
| import streamlit as st | |
| import pandas as pd | |
| import openpyxl | |
| import torch | |
| import faiss | |
| from reportlab.lib.pagesizes import letter | |
| from reportlab.pdfgen import canvas | |
| from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline | |
| from sentence_transformers import SentenceTransformer | |
# --- Streamlit page configuration ---
st.set_page_config(page_title="AI-Powered Timetable", layout="wide")
st.markdown(
    "<h1 style='text-align: center; color: #4CAF50;'>π AI-Powered Timetable with RAG</h1>",
    unsafe_allow_html=True,
)

# --- Sidebar: Hugging Face API key (stored in-memory only, masked input) ---
st.sidebar.markdown("## π Enter Hugging Face API Key")
hf_api_key = st.sidebar.text_input("API Key", type="password")

# --- Sidebar: timetable uploads (Excel or PDF accepted for each slot) ---
st.sidebar.markdown("## π Upload Your Timetable Files")
_ACCEPTED_TYPES = ["xlsx", "pdf"]
uploaded_master = st.sidebar.file_uploader("Upload Master Timetable", type=_ACCEPTED_TYPES)
uploaded_lab = st.sidebar.file_uploader("Upload Lab Timetable", type=_ACCEPTED_TYPES)
uploaded_classroom = st.sidebar.file_uploader("Upload Classroom Timetable", type=_ACCEPTED_TYPES)
uploaded_individual = st.sidebar.file_uploader("Upload Individual Timetable", type=_ACCEPTED_TYPES)

# Display-name -> UploadedFile (or None when that slot is empty).
uploaded_files = dict(
    [
        ("Master Timetable", uploaded_master),
        ("Lab Timetable", uploaded_lab),
        ("Classroom Timetable", uploaded_classroom),
        ("Individual Timetable", uploaded_individual),
    ]
)
| # β Load Timetable Data | |
def load_timetable(file):
    """Parse an uploaded timetable file into Python data.

    Parameters
    ----------
    file :
        A Streamlit ``UploadedFile`` (file-like, with a ``.name``), or a
        falsy value when nothing was uploaded.

    Returns
    -------
    list[tuple] | str | None
        Rows of cell values for ``.xlsx`` files, extracted text for
        ``.pdf`` files, or ``None`` when *file* is missing or has an
        unsupported extension.
    """
    if not file:
        return None
    # Case-insensitive extension check so ".XLSX"/".PDF" uploads also work.
    name = file.name.lower()
    if name.endswith(".xlsx"):
        wb = openpyxl.load_workbook(file)
        try:
            sheet = wb.active
            return [row for row in sheet.iter_rows(values_only=True)]
        finally:
            wb.close()  # release the workbook's file handle (was leaked before)
    if name.endswith(".pdf"):
        import PyPDF2  # local import: only needed when a PDF is uploaded

        pdf_reader = PyPDF2.PdfReader(file)
        # extract_text() may return None for image-only pages; coalesce to "".
        return "".join(
            (page.extract_text() or "") + "\n" for page in pdf_reader.pages
        )
    return None  # unsupported extension — explicit, was an implicit fall-through
| # β Extract and Store Data | |
# --- Extract and store data from every uploaded file ---
rag_data = {name: load_timetable(file) for name, file in uploaded_files.items() if file}

# --- Encode timetable data with sentence embeddings ---
embedder = SentenceTransformer("all-MiniLM-L6-v2")


def _as_text(data):
    """Render one timetable's data as a single string for embedding.

    PDF uploads are already plain text; joining a *string* with
    ``"\\n".join(map(str, data))`` would split it into single characters,
    so strings are passed through unchanged.
    """
    return data if isinstance(data, str) else "\n".join(map(str, data))


data_texts = [_as_text(data) for data in rag_data.values() if data]

# --- FAISS vector store ---
# Take the dimension from the model itself so the (empty) index can be
# built even before any files are uploaded; encoding an empty list and
# reading data_embeddings.shape[1] would crash.
dimension = embedder.get_sentence_embedding_dimension()
index = faiss.IndexFlatL2(dimension)
if data_texts:
    data_embeddings = embedder.encode(data_texts, convert_to_tensor=True)
    index.add(data_embeddings.cpu().numpy())
| # β Retrieve Relevant Data using RAG | |
def retrieve_data(query):
    """Return the stored timetable text most similar to *query* (top-1 FAISS hit)."""
    query_vec = embedder.encode([query], convert_to_tensor=True).cpu().numpy()
    _dists, ids = index.search(query_vec, k=1)
    best = ids[0][0]
    if best < len(data_texts):
        return data_texts[best]
    return "No relevant data found."
| # β Ask LLaMA-3-8B with RAG Context | |
def ask_llama_api(query):
    """Answer *query* with Meta-Llama-3-8B via the HF Inference API, RAG-augmented.

    The most relevant uploaded timetable text is retrieved and prepended
    to the question as context.

    Parameters
    ----------
    query : str
        The user's natural-language question.

    Returns
    -------
    str
        The model's answer, or a human-readable error string (missing key,
        network failure, or non-200 API response). Never raises.
    """
    if not hf_api_key:
        return "Error: Please enter your API key."
    context = retrieve_data(query)  # most relevant uploaded timetable text
    final_query = f"Based on the timetable data:\n{context}\nAnswer this query: {query}"
    url = "https://api-inference.huggingface.co/v1/chat/completions"
    headers = {
        "Authorization": f"Bearer {hf_api_key}",
        "Content-Type": "application/json",
    }
    payload = {
        "model": "meta-llama/Meta-Llama-3-8B",
        "messages": [{"role": "user", "content": final_query}],
        "max_tokens": 500,
    }
    try:
        # A timeout is essential: without one a dead endpoint would hang
        # the Streamlit worker forever.
        response = requests.post(url, headers=headers, json=payload, timeout=60)
    except requests.RequestException as exc:
        return f"API Error: request failed - {exc}"
    if response.status_code == 200:
        return response.json()["choices"][0]["message"]["content"]
    return f"API Error: {response.status_code} - {response.text}"
| # β AI Query Section | |
| st.markdown("## π€ Ask LLaMA-3 AI About Your Timetable") | |
| user_query = st.text_input("Type your question here (e.g., 'Who is free at 10 AM on Monday?')") | |
| if st.button("Ask AI via RAG"): | |
| ai_response = ask_llama_api(user_query) | |
| st.write("π§ **LLaMA-3 AI Suggests:**", ai_response) | |
| # β Auto-Schedule Feature | |
| st.markdown("## π Auto-Schedule Missing Timetable Slots") | |
| selected_file = st.selectbox("Choose a timetable file to auto-fill missing slots:", list(uploaded_files.keys())) | |
| if st.button("Auto-Schedule"): | |
| result = retrieve_data(selected_file) | |
| st.write("β ", result) | |
# --- Preview of every uploaded timetable ---
st.markdown("## π View Uploaded Timetables")
for name, file in uploaded_files.items():
    if file:
        st.markdown(f"### {name}")
        if file.name.endswith(".xlsx"):
            # load_timetable() already consumed this stream at startup;
            # rewind it or pd.read_excel would see an empty/exhausted file.
            file.seek(0)
            st.dataframe(pd.read_excel(file))
        else:
            # PDFs were stored as extracted plain text.
            st.text(rag_data[name])
# --- Sidebar: export Q&A pairs to a local PDF ---
st.sidebar.markdown("## π Export AI Responses to PDF")
if st.sidebar.button("Export as PDF"):
    pdf = canvas.Canvas("Timetable_Responses.pdf", pagesize=letter)
    pdf.drawString(100, 750, "AI-Powered Timetable Responses")
    cursor = 720
    # TODO: placeholder pair — wire up to the real Q&A history.
    for question, answer in [("Example Query", "Example Response")]:
        pdf.drawString(50, cursor, f"Q: {question}")
        cursor -= 20
        pdf.drawString(70, cursor, f"A: {answer}")
        cursor -= 30
    pdf.save()
    st.sidebar.success("PDF Exported: Timetable_Responses.pdf")