FaroukTomori committed on
Commit
0837167
Β·
verified Β·
1 Parent(s): 4fa55d2

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +47 -1
src/streamlit_app.py CHANGED
@@ -6,6 +6,7 @@ Version: 3.0 - Production Ready, No Demo Mode
6
 
7
  import streamlit as st
8
  import os
 
9
 
10
  # Configure page
11
  st.set_page_config(
@@ -14,6 +15,33 @@ st.set_page_config(
14
  layout="wide"
15
  )
16
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
  # Try to import the fine-tuned model components
18
  try:
19
  from fine import ProgrammingEducationAI, ComprehensiveFeedback
@@ -62,6 +90,14 @@ def main():
62
  st.success("🌐 Using public model - no authentication required")
63
  st.info(f"πŸ“ Model path: FaroukTomori/codellama-7b-programming-education")
64
 
 
 
 
 
 
 
 
 
65
  # Show if model is loaded in session
66
  if 'ai_tutor' in st.session_state:
67
  st.success("βœ… Model loaded in session")
@@ -98,11 +134,21 @@ def main():
98
  st.info(
99
  f"πŸ” Attempting to load model from: {model_path}")
100
 
 
 
 
 
 
 
 
 
101
  ai_tutor = ProgrammingEducationAI(model_path)
102
  st.success(
103
  "βœ… Model class instantiated successfully")
104
 
105
- ai_tutor.load_model()
 
 
106
  st.session_state['ai_tutor'] = ai_tutor
107
  st.success(
108
  "βœ… Fine-tuned model loaded successfully!")
 
6
 
7
  import streamlit as st
8
  import os
9
+ import tempfile
10
 
11
  # Configure page
12
  st.set_page_config(
 
15
  layout="wide"
16
  )
17
 
18
+ # Set up proper cache directories for HF Spaces
19
+
20
+
21
def setup_cache_directories():
    """Point the Hugging Face cache environment variables at writable paths.

    HF Spaces containers may not allow writes to the default cache location,
    so every cache root is redirected beneath the system temp directory.

    Returns:
        bool: True when all directories were created and exported into
        ``os.environ``; False when any step failed (the error is surfaced
        to the UI via ``st.error``).
    """
    try:
        base = os.path.join(tempfile.gettempdir(), 'huggingface')
        # (env var, writable path) pairs — all rooted under the temp dir.
        targets = (
            ('HF_HOME', base),
            ('TRANSFORMERS_CACHE', os.path.join(base, 'transformers')),
            ('HF_DATASETS_CACHE', os.path.join(base, 'datasets')),
        )
        for name, path in targets:
            os.environ[name] = path
            os.makedirs(path, exist_ok=True)
            st.info(f"πŸ“ Cache directory set: {name} = {path}")
        return True
    except Exception as exc:
        st.error(f"❌ Failed to set up cache directories: {exc}")
        return False
40
+
41
+
42
+ # Set up cache directories
43
+ setup_cache_directories()
44
+
45
  # Try to import the fine-tuned model components
46
  try:
47
  from fine import ProgrammingEducationAI, ComprehensiveFeedback
 
90
  st.success("🌐 Using public model - no authentication required")
91
  st.info(f"πŸ“ Model path: FaroukTomori/codellama-7b-programming-education")
92
 
93
+ # Memory optimization info
94
+ st.markdown("---")
95
+ st.markdown("### πŸ’Ύ Memory Optimization")
96
+ st.info("πŸ”§ 8-bit quantization enabled")
97
+ st.info("πŸ“ Proper cache directories configured")
98
+ st.info("⚑ Auto device mapping for efficiency")
99
+ st.warning("⚠️ Model size: ~13GB (quantized to ~7GB)")
100
+
101
  # Show if model is loaded in session
102
  if 'ai_tutor' in st.session_state:
103
  st.success("βœ… Model loaded in session")
 
134
  st.info(
135
  f"πŸ” Attempting to load model from: {model_path}")
136
 
137
+ # Memory optimization settings for HF Spaces
138
+ st.info(
139
+ "πŸ”§ Loading with memory optimization for HF Spaces...")
140
+ st.info(
141
+ "πŸ’Ύ Using 8-bit quantization to reduce memory usage")
142
+ st.info(
143
+ "πŸ“ Using proper cache directories for permissions")
144
+
145
  ai_tutor = ProgrammingEducationAI(model_path)
146
  st.success(
147
  "βœ… Model class instantiated successfully")
148
 
149
+ # Load model with memory optimization
150
+ ai_tutor.load_model(
151
+ load_in_8bit=True, device_map="auto")
152
  st.session_state['ai_tutor'] = ai_tutor
153
  st.success(
154
  "βœ… Fine-tuned model loaded successfully!")