jake2004 commited on
Commit
037eef9
Β·
verified Β·
1 Parent(s): f3e3ab9

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +124 -0
app.py ADDED
@@ -0,0 +1,124 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import os
3
+ import streamlit as st
4
+ import pandas as pd
5
+ import openpyxl
6
+ import io
7
+ import torch
8
+ from reportlab.lib.pagesizes import letter
9
+ from reportlab.pdfgen import canvas
10
+ from huggingface_hub import InferenceClient
11
+ from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
12
+
13
+ # Load API key securely
14
+ with open("secrets.json", "r") as file:
15
+ secrets = json.load(file)
16
+ HF_API_KEY = secrets["HF_API_KEY"]
17
+
18
+ client = InferenceClient(api_token=HF_API_KEY)
19
+
20
+ # Load Local Model with Device Optimization
21
+ MODEL_NAME = "mistralai/Mistral-7B-Instruct-v0.3"
22
+ device = "cuda" if torch.cuda.is_available() else "cpu"
23
+
24
+ tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
25
+ model = AutoModelForCausalLM.from_pretrained(MODEL_NAME).to(device)
26
+ pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, device=0 if device == "cuda" else -1)
27
+
28
+ # Initialize session state for uploaded files
29
+ if "uploaded_files" not in st.session_state:
30
+ st.session_state.uploaded_files = {}
31
+
32
+ # Streamlit UI Setup
33
+ st.set_page_config(page_title="AI-Powered Timetable", layout="wide")
34
+ st.markdown("<h1 style='text-align: center; color: #4CAF50;'>πŸ“… AI-Powered Timetable</h1>", unsafe_allow_html=True)
35
+
36
+ # File Upload Section
37
+ st.sidebar.markdown("## πŸ“‚ Upload Your Timetable Files")
38
+ uploaded_master = st.sidebar.file_uploader("Upload Master Timetable", type=["xlsx"])
39
+ uploaded_lab = st.sidebar.file_uploader("Upload Lab Timetable", type=["xlsx"])
40
+ uploaded_classroom = st.sidebar.file_uploader("Upload Classroom Timetable", type=["xlsx"])
41
+ uploaded_individual = st.sidebar.file_uploader("Upload Individual Timetable", type=["xlsx"])
42
+
43
+ # Save uploaded files locally
44
+ if uploaded_master:
45
+ st.session_state.uploaded_files["Master Timetable"] = uploaded_master
46
+ if uploaded_lab:
47
+ st.session_state.uploaded_files["Lab Timetable"] = uploaded_lab
48
+ if uploaded_classroom:
49
+ st.session_state.uploaded_files["Classroom Timetable"] = uploaded_classroom
50
+ if uploaded_individual:
51
+ st.session_state.uploaded_files["Individual Timetable"] = uploaded_individual
52
+
53
+ # Define paths for uploaded files
54
+ TIMETABLE_FILES = {name: file for name, file in st.session_state.uploaded_files.items()}
55
+
56
+ # Load Timetable Data
57
+ def load_timetable(sheet_name):
58
+ if sheet_name not in TIMETABLE_FILES:
59
+ return None
60
+ file = TIMETABLE_FILES[sheet_name]
61
+ wb = openpyxl.load_workbook(file)
62
+ sheet = wb.active
63
+ return [row for row in sheet.iter_rows(values_only=True)]
64
+
65
+ # Ask Mistral AI a question using API
66
+ def ask_mistral_api(query):
67
+ response = client.text_generation(model=MODEL_NAME, inputs=query, max_new_tokens=500)
68
+ return response
69
+
70
+ # Ask Mistral AI a question using local model
71
+ def ask_mistral_local(query):
72
+ inputs = tokenizer(query, return_tensors="pt").to(device)
73
+ outputs = model.generate(**inputs, max_new_tokens=200)
74
+ response = tokenizer.decode(outputs[0], skip_special_tokens=True)
75
+ return response
76
+
77
+ # Auto-Schedule Missing Slots
78
+ def auto_schedule(sheet_name):
79
+ if sheet_name not in TIMETABLE_FILES:
80
+ return "No timetable uploaded."
81
+
82
+ file = TIMETABLE_FILES[sheet_name]
83
+ local_path = f"temp_{sheet_name.replace(' ', '_')}.xlsx"
84
+
85
+ with open(local_path, "wb") as f:
86
+ f.write(file.getbuffer())
87
+
88
+ wb = openpyxl.load_workbook(local_path)
89
+ sheet = wb.active
90
+
91
+ empty_slots = []
92
+ for row_idx, row in enumerate(sheet.iter_rows(min_row=2, values_only=True), start=2):
93
+ if None in row or "" in row:
94
+ empty_slots.append(row_idx)
95
+
96
+ for row_idx in empty_slots:
97
+ query = f"Suggest a subject and faculty for the empty slot in row {row_idx}."
98
+ suggestion = ask_mistral_local(query)
99
+
100
+ try:
101
+ subject, faculty = suggestion.split(", Faculty: ")
102
+ subject = subject.replace("Subject: ", "").strip()
103
+ faculty = faculty.strip()
104
+ sheet.cell(row=row_idx, column=4, value=subject)
105
+ sheet.cell(row=row_idx, column=5, value=faculty)
106
+ except:
107
+ continue
108
+
109
+ wb.save(local_path)
110
+ return f"Auto-scheduling completed for {len(empty_slots)} slots."
111
+
112
+ # AI Query Section
113
+ st.markdown("## πŸ€– Ask Mistral AI About Your Timetable")
114
+ user_query = st.text_input("Type your question here (e.g., 'Who is free at 10 AM on Monday?')")
115
+
116
+ if st.button("Ask AI via API"):
117
+ ai_response = ask_mistral_api(user_query)
118
+ st.write("🧠 **Mistral AI Suggests:**", ai_response)
119
+
120
+ if st.button("Ask AI via Local Model"):
121
+ ai_response = ask_mistral_local(user_query)
122
+ st.write("🧠 **Mistral AI Suggests:**", ai_response)
123
+
124
+ # πŸš€ Now Your App is Fully Functional & Optimized! πŸš€