jake2004 committed on
Commit
689f72f
·
verified ·
1 Parent(s): 79f9aec

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -49
app.py CHANGED
@@ -3,21 +3,19 @@ import os
3
  import streamlit as st
4
  import pandas as pd
5
  import openpyxl
6
- import io
7
  import torch
8
  from reportlab.lib.pagesizes import letter
9
  from reportlab.pdfgen import canvas
10
  from huggingface_hub import InferenceClient
11
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
12
 
13
- # Load API key securely
14
- with open("secrets.json", "r") as file:
15
- secrets = json.load(file)
16
- HF_API_KEY = secrets["HF_API_KEY"]
17
 
 
18
  client = InferenceClient(api_token=HF_API_KEY)
19
 
20
- # Load Local Model with Device Optimization
21
  MODEL_NAME = "mistralai/Mistral-7B-Instruct-v0.3"
22
  device = "cuda" if torch.cuda.is_available() else "cpu"
23
 
@@ -25,67 +23,50 @@ tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
25
  model = AutoModelForCausalLM.from_pretrained(MODEL_NAME).to(device)
26
  pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, device=0 if device == "cuda" else -1)
27
 
28
- # Initialize session state for uploaded files
29
- if "uploaded_files" not in st.session_state:
30
- st.session_state.uploaded_files = {}
31
-
32
- # Streamlit UI Setup
33
  st.set_page_config(page_title="AI-Powered Timetable", layout="wide")
34
  st.markdown("<h1 style='text-align: center; color: #4CAF50;'>πŸ“… AI-Powered Timetable</h1>", unsafe_allow_html=True)
35
 
36
- # File Upload Section
37
  st.sidebar.markdown("## πŸ“‚ Upload Your Timetable Files")
38
  uploaded_master = st.sidebar.file_uploader("Upload Master Timetable", type=["xlsx"])
39
  uploaded_lab = st.sidebar.file_uploader("Upload Lab Timetable", type=["xlsx"])
40
  uploaded_classroom = st.sidebar.file_uploader("Upload Classroom Timetable", type=["xlsx"])
41
  uploaded_individual = st.sidebar.file_uploader("Upload Individual Timetable", type=["xlsx"])
42
 
43
- # Save uploaded files locally
44
- if uploaded_master:
45
- st.session_state.uploaded_files["Master Timetable"] = uploaded_master
46
- if uploaded_lab:
47
- st.session_state.uploaded_files["Lab Timetable"] = uploaded_lab
48
- if uploaded_classroom:
49
- st.session_state.uploaded_files["Classroom Timetable"] = uploaded_classroom
50
- if uploaded_individual:
51
- st.session_state.uploaded_files["Individual Timetable"] = uploaded_individual
52
-
53
- # Define paths for uploaded files
54
- TIMETABLE_FILES = {name: file for name, file in st.session_state.uploaded_files.items()}
55
-
56
- # Load Timetable Data
57
- def load_timetable(sheet_name):
58
- if sheet_name not in TIMETABLE_FILES:
59
  return None
60
- file = TIMETABLE_FILES[sheet_name]
61
  wb = openpyxl.load_workbook(file)
62
  sheet = wb.active
63
  return [row for row in sheet.iter_rows(values_only=True)]
64
 
65
- # Ask Mistral AI a question using API
66
  def ask_mistral_api(query):
67
  response = client.text_generation(model=MODEL_NAME, inputs=query, max_new_tokens=500)
68
  return response
69
 
70
- # Ask Mistral AI a question using local model
71
  def ask_mistral_local(query):
72
  inputs = tokenizer(query, return_tensors="pt").to(device)
73
  outputs = model.generate(**inputs, max_new_tokens=200)
74
  response = tokenizer.decode(outputs[0], skip_special_tokens=True)
75
  return response
76
 
77
- # Auto-Schedule Missing Slots
78
- def auto_schedule(sheet_name):
79
- if sheet_name not in TIMETABLE_FILES:
80
  return "No timetable uploaded."
81
 
82
- file = TIMETABLE_FILES[sheet_name]
83
- local_path = f"temp_{sheet_name.replace(' ', '_')}.xlsx"
84
-
85
- with open(local_path, "wb") as f:
86
- f.write(file.getbuffer())
87
-
88
- wb = openpyxl.load_workbook(local_path)
89
  sheet = wb.active
90
 
91
  empty_slots = []
@@ -99,17 +80,14 @@ def auto_schedule(sheet_name):
99
 
100
  try:
101
  subject, faculty = suggestion.split(", Faculty: ")
102
- subject = subject.replace("Subject: ", "").strip()
103
- faculty = faculty.strip()
104
- sheet.cell(row=row_idx, column=4, value=subject)
105
- sheet.cell(row=row_idx, column=5, value=faculty)
106
  except:
107
  continue
108
 
109
- wb.save(local_path)
110
  return f"Auto-scheduling completed for {len(empty_slots)} slots."
111
 
112
- # AI Query Section
113
  st.markdown("## πŸ€– Ask Mistral AI About Your Timetable")
114
  user_query = st.text_input("Type your question here (e.g., 'Who is free at 10 AM on Monday?')")
115
 
@@ -120,5 +98,3 @@ if st.button("Ask AI via API"):
120
  if st.button("Ask AI via Local Model"):
121
  ai_response = ask_mistral_local(user_query)
122
  st.write("🧠 **Mistral AI Suggests:**", ai_response)
123
-
124
- # πŸš€ Now Your App is Fully Functional & Optimized! πŸš€
 
3
  import streamlit as st
4
  import pandas as pd
5
  import openpyxl
 
6
  import torch
7
  from reportlab.lib.pagesizes import letter
8
  from reportlab.pdfgen import canvas
9
  from huggingface_hub import InferenceClient
10
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
11
 
12
# βœ… Load API Key Securely from Hugging Face Secrets.
# os.getenv returns None if the secret is not configured; the client is still
# constructed, but API calls will fail with an auth error in that case.
HF_API_KEY = os.getenv("HF_API_KEY")

# βœ… Initialize Hugging Face API Client.
# Fix: InferenceClient accepts the key via `token=`; `api_token` is not a
# valid parameter and raises TypeError at import time.
client = InferenceClient(token=HF_API_KEY)
17
 
18
# βœ… Load Local Model with Device Optimization — pick CUDA when available,
# otherwise fall back to CPU.
MODEL_NAME = "mistralai/Mistral-7B-Instruct-v0.3"
if torch.cuda.is_available():
    device = "cuda"
else:
    device = "cpu"
21
 
 
23
# Materialize the local model once at startup, then wrap it in a
# text-generation pipeline. pipeline() expects a CUDA device index (0) or -1
# for CPU, unlike the "cuda"/"cpu" string used by .to().
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME).to(device)
pipe_device = 0 if device == "cuda" else -1
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, device=pipe_device)
25
 
26
# βœ… Streamlit UI Setup — page config must be the first Streamlit call.
st.set_page_config(page_title="AI-Powered Timetable", layout="wide")
_title_html = "<h1 style='text-align: center; color: #4CAF50;'>πŸ“… AI-Powered Timetable</h1>"
st.markdown(_title_html, unsafe_allow_html=True)
29
 
30
# βœ… File Upload Section — one sidebar uploader per timetable, gathered into a
# name -> UploadedFile mapping (values are None until a file is chosen).
st.sidebar.markdown("## πŸ“‚ Upload Your Timetable Files")
uploaded_master = st.sidebar.file_uploader("Upload Master Timetable", type=["xlsx"])
uploaded_lab = st.sidebar.file_uploader("Upload Lab Timetable", type=["xlsx"])
uploaded_classroom = st.sidebar.file_uploader("Upload Classroom Timetable", type=["xlsx"])
uploaded_individual = st.sidebar.file_uploader("Upload Individual Timetable", type=["xlsx"])

uploaded_files = dict(
    zip(
        ("Master Timetable", "Lab Timetable", "Classroom Timetable", "Individual Timetable"),
        (uploaded_master, uploaded_lab, uploaded_classroom, uploaded_individual),
    )
)
43
+
44
# βœ… Load Timetable Data (Directly from Uploaded File)
def load_timetable(file):
    """Read every row of the first worksheet of an uploaded .xlsx file.

    Args:
        file: A file-like object (e.g. a Streamlit UploadedFile), or a falsy
            value when nothing was uploaded.

    Returns:
        A list of row tuples containing cell values only, or None when no
        file was provided.
    """
    if not file:
        return None
    wb = openpyxl.load_workbook(file)
    try:
        return [row for row in wb.active.iter_rows(values_only=True)]
    finally:
        # Fix: the original never closed the workbook, leaking its resources
        # on every call.
        wb.close()
51
 
52
# βœ… Ask Mistral AI via API
def ask_mistral_api(query):
    """Send *query* to the hosted Mistral model via the HF Inference API.

    Args:
        query: The user's prompt string.

    Returns:
        The generated text from the API.

    Fix: InferenceClient.text_generation takes the prompt as its first
    positional argument (named `prompt`) and the model as the `model`
    keyword; there is no `inputs` parameter, so the original call raised
    TypeError on every use.
    """
    return client.text_generation(query, model=MODEL_NAME, max_new_tokens=500)
56
 
57
# βœ… Ask Mistral AI Locally
def ask_mistral_local(query):
    """Generate a reply with the locally loaded model.

    Args:
        query: The user's prompt string.

    Returns:
        The decoded generation. NOTE(review): `generate` returns prompt +
        continuation tokens, so the prompt text is included in the output.
    """
    inputs = tokenizer(query, return_tensors="pt").to(device)
    # Improvement: disable autograd bookkeeping during pure inference to
    # reduce memory use; results are unchanged.
    with torch.no_grad():
        outputs = model.generate(**inputs, max_new_tokens=200)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
63
 
64
+ # βœ… Auto-Schedule Missing Slots
65
+ def auto_schedule(file):
66
+ if not file:
67
  return "No timetable uploaded."
68
 
69
+ wb = openpyxl.load_workbook(file)
 
 
 
 
 
 
70
  sheet = wb.active
71
 
72
  empty_slots = []
 
80
 
81
  try:
82
  subject, faculty = suggestion.split(", Faculty: ")
83
+ sheet.cell(row=row_idx, column=4, value=subject.strip())
84
+ sheet.cell(row=row_idx, column=5, value=faculty.strip())
 
 
85
  except:
86
  continue
87
 
 
88
  return f"Auto-scheduling completed for {len(empty_slots)} slots."
89
 
90
# βœ… AI Query Section — free-text question box for the timetable assistant.
st.markdown("## πŸ€– Ask Mistral AI About Your Timetable")
_query_hint = "Type your question here (e.g., 'Who is free at 10 AM on Monday?')"
user_query = st.text_input(_query_hint)
93
 
 
98
# Run the query against the locally loaded model when requested.
if st.button("Ask AI via Local Model"):
    st.write("🧠 **Mistral AI Suggests:**", ask_mistral_local(user_query))