Fredrik Sitje committed on
Commit
058983a
·
1 Parent(s): ecf7479

Cached get_hf_api(), the term_category_pairs computation, and load_users(). This should increase reactivity a lot.

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +26 -11
src/streamlit_app.py CHANGED
@@ -30,13 +30,18 @@ if not HF_TOKEN:
30
  st.error("❌ **Configuration Error**: HF_TOKEN is not set. Please configure it in Hugging Face Spaces settings (Variables and secrets).")
31
  st.stop()
32
 
33
- # Initialize HF API - raise exception if initialization fails
34
- try:
35
- login(token=HF_TOKEN)
36
- hf_api = HfApi(token=HF_TOKEN)
37
- except Exception as e:
38
- st.error(f"❌ **Error initializing Hugging Face API**: {str(e)}")
39
- st.stop()
 
 
 
 
 
40
 
41
  @st.cache_data
42
  def load_grading_template():
@@ -132,6 +137,7 @@ def hash_password(password):
132
  """Hash a password using SHA256"""
133
  return hashlib.sha256(password.encode()).hexdigest()
134
 
 
135
  def load_users():
136
  """Load user credentials from Hugging Face Dataset"""
137
  try:
@@ -162,6 +168,10 @@ def save_users(users):
162
  token=HF_TOKEN
163
  )
164
  os.unlink(temp_path)
 
 
 
 
165
  return True
166
  except Exception as e:
167
  st.error(f"❌ **Error saving users to Hugging Face Dataset**: {str(e)}")
@@ -567,11 +577,16 @@ class Term:
567
  return None
568
 
569
 
 
 
 
 
 
 
 
 
570
  # Create a list of unique (term, category) pairs for navigation
571
- # Filter out categories that have no subcategories after filtering Unknown answers
572
- all_pairs = df[['term', 'category']].drop_duplicates().sort_values(['term', 'category']).values.tolist()
573
- term_category_pairs = [(term, category) for term, category in all_pairs
574
- if category_has_subcategories(term, category, df)]
575
  total_pairs = len(term_category_pairs)
576
 
577
  # Cache for Term instances
 
30
  st.error("❌ **Configuration Error**: HF_TOKEN is not set. Please configure it in Hugging Face Spaces settings (Variables and secrets).")
31
  st.stop()
32
 
33
+ @st.cache_resource
34
+ def get_hf_api():
35
+ """Get cached Hugging Face API client - only initializes once per session"""
36
+ try:
37
+ login(token=HF_TOKEN)
38
+ return HfApi(token=HF_TOKEN)
39
+ except Exception as e:
40
+ st.error(f"❌ **Error initializing Hugging Face API**: {str(e)}")
41
+ st.stop()
42
+
43
+ # Initialize HF API - cached to avoid re-initialization on every rerun
44
+ hf_api = get_hf_api()
45
 
46
  @st.cache_data
47
  def load_grading_template():
 
137
  """Hash a password using SHA256"""
138
  return hashlib.sha256(password.encode()).hexdigest()
139
 
140
+ @st.cache_data
141
  def load_users():
142
  """Load user credentials from Hugging Face Dataset"""
143
  try:
 
168
  token=HF_TOKEN
169
  )
170
  os.unlink(temp_path)
171
+
172
+ # Clear cache for users to ensure fresh data on next load
173
+ load_users.clear()
174
+
175
  return True
176
  except Exception as e:
177
  st.error(f"❌ **Error saving users to Hugging Face Dataset**: {str(e)}")
 
577
  return None
578
 
579
 
580
+ @st.cache_data
581
+ def get_term_category_pairs(df):
582
+ """Get filtered term-category pairs, cached to avoid recomputation on every rerun"""
583
+ # Filter out categories that have no subcategories after filtering Unknown answers
584
+ all_pairs = df[['term', 'category']].drop_duplicates().sort_values(['term', 'category']).values.tolist()
585
+ return [(term, category) for term, category in all_pairs
586
+ if category_has_subcategories(term, category, df)]
587
+
588
  # Create a list of unique (term, category) pairs for navigation
589
+ term_category_pairs = get_term_category_pairs(df)
 
 
 
590
  total_pairs = len(term_category_pairs)
591
 
592
  # Cache for Term instances