add unified project overview endpoint with caching and role-based response
19dd95f
"""
Cache utilities for dashboard and other high-frequency data.
Uses cachetools for in-memory caching with TTL (Time To Live).
No external dependencies like Redis needed - perfect for HuggingFace Spaces.
"""
from cachetools import TTLCache
from threading import RLock
from typing import Optional, Any
import logging
logger = logging.getLogger(__name__)
# Thread-safe dashboard cache with 5-minute TTL
# Stores up to 1000 project dashboards in memory
dashboard_cache = TTLCache(maxsize=1000, ttl=300)
dashboard_cache_lock = RLock()
# Trends cache with 10-minute TTL (trends change less frequently)
trends_cache = TTLCache(maxsize=500, ttl=600)
trends_cache_lock = RLock()
# Project overview cache with 12-hour TTL (structure changes rarely)
# Stores project structure: regions, roles, subcontractors, team info
overview_cache = TTLCache(maxsize=500, ttl=43200) # 12 hours = 43200 seconds
overview_cache_lock = RLock()
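
# Intended usage (illustrative sketch, not enforced by this module): callers follow a
# cache-aside pattern with the helpers below. `build_dashboard` is a hypothetical
# loader standing in for whatever database query produces the payload:
#
#     data = get_cached_dashboard(project_id, user_id)
#     if data is None:
#         data = build_dashboard(project_id, user_id)  # query the database
#         set_cached_dashboard(project_id, user_id, data)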


def get_cached_dashboard(project_id: str, user_id: str) -> Optional[dict]:
    """
    Get cached dashboard data for a project and user.

    Args:
        project_id: UUID of the project
        user_id: UUID of the user

    Returns:
        Cached dashboard dict or None if not found/expired
    """
    try:
        with dashboard_cache_lock:
            key = f"dashboard:{project_id}:{user_id}"
            cached_data = dashboard_cache.get(key)
            if cached_data:
                logger.debug(f"Cache HIT: {key}")
                return cached_data
    except Exception as e:
        logger.error(f"Error retrieving from cache: {e}")
    return None


def set_cached_dashboard(project_id: str, user_id: str, data: dict) -> None:
    """
    Cache dashboard data for a project and user.

    Args:
        project_id: UUID of the project
        user_id: UUID of the user
        data: Dashboard data to cache
    """
    try:
        with dashboard_cache_lock:
            key = f"dashboard:{project_id}:{user_id}"
            dashboard_cache[key] = data
            logger.debug(f"Cache SET: {key}")
    except Exception as e:
        logger.error(f"Error setting cache: {e}")


def invalidate_dashboard_cache(project_id: str) -> None:
    """
    Invalidate all cached dashboards for a project.
    Called when project data changes (new ticket, sales order, etc.)

    Args:
        project_id: UUID of the project
    """
    try:
        with dashboard_cache_lock:
            # Find all keys for this project
            keys_to_delete = [
                k for k in dashboard_cache.keys()
                if k.startswith(f"dashboard:{project_id}:")
            ]
            # Delete them
            for key in keys_to_delete:
                dashboard_cache.pop(key, None)
            if keys_to_delete:
                logger.info(f"Invalidated {len(keys_to_delete)} cache entries for project {project_id}")
    except Exception as e:
        logger.error(f"Error invalidating cache: {e}")


def get_cached_trends(project_id: str, metric: str, period: str) -> Optional[dict]:
    """
    Get cached trend data.

    Args:
        project_id: UUID of the project
        metric: Metric name (sales_orders, tickets, etc.)
        period: Time period (7days, 30days, etc.)

    Returns:
        Cached trends dict or None if not found/expired
    """
    try:
        with trends_cache_lock:
            key = f"trends:{project_id}:{metric}:{period}"
            cached_data = trends_cache.get(key)
            if cached_data:
                logger.debug(f"Trends cache HIT: {key}")
                return cached_data
    except Exception as e:
        logger.error(f"Error retrieving trends from cache: {e}")
    return None


def set_cached_trends(project_id: str, metric: str, period: str, data: dict) -> None:
    """
    Cache trend data.

    Args:
        project_id: UUID of the project
        metric: Metric name
        period: Time period
        data: Trends data to cache
    """
    try:
        with trends_cache_lock:
            key = f"trends:{project_id}:{metric}:{period}"
            trends_cache[key] = data
            logger.debug(f"Trends cache SET: {key}")
    except Exception as e:
        logger.error(f"Error setting trends cache: {e}")


def invalidate_trends_cache(project_id: str, metric: Optional[str] = None) -> None:
    """
    Invalidate trend cache for a project.

    Args:
        project_id: UUID of the project
        metric: Specific metric to invalidate, or None for all
    """
    try:
        with trends_cache_lock:
            if metric:
                # Invalidate specific metric
                keys_to_delete = [
                    k for k in trends_cache.keys()
                    if k.startswith(f"trends:{project_id}:{metric}:")
                ]
            else:
                # Invalidate all trends for project
                keys_to_delete = [
                    k for k in trends_cache.keys()
                    if k.startswith(f"trends:{project_id}:")
                ]
            for key in keys_to_delete:
                trends_cache.pop(key, None)
            if keys_to_delete:
                logger.info(f"Invalidated {len(keys_to_delete)} trends cache entries")
    except Exception as e:
        logger.error(f"Error invalidating trends cache: {e}")


def clear_all_caches() -> None:
    """
    Clear all caches. Use with caution.
    Typically only needed for testing or maintenance.
    """
    try:
        with dashboard_cache_lock:
            dashboard_cache.clear()
            logger.info("Cleared dashboard cache")
        with trends_cache_lock:
            trends_cache.clear()
            logger.info("Cleared trends cache")
        with overview_cache_lock:
            overview_cache.clear()
            logger.info("Cleared overview cache")
    except Exception as e:
        logger.error(f"Error clearing caches: {e}")


def get_cached_overview(project_id: str, user_id: str) -> Optional[dict]:
    """
    Get cached project overview data.

    Args:
        project_id: UUID of the project
        user_id: UUID of the user

    Returns:
        Cached overview dict or None if not found/expired
    """
    try:
        with overview_cache_lock:
            key = f"overview:{project_id}:{user_id}"
            cached_data = overview_cache.get(key)
            if cached_data:
                logger.debug(f"Overview cache HIT: {key}")
                return cached_data
    except Exception as e:
        logger.error(f"Error retrieving overview from cache: {e}")
    return None


def set_cached_overview(project_id: str, user_id: str, data: dict) -> None:
    """
    Cache project overview data.

    Args:
        project_id: UUID of the project
        user_id: UUID of the user
        data: Overview data to cache
    """
    try:
        with overview_cache_lock:
            key = f"overview:{project_id}:{user_id}"
            overview_cache[key] = data
            logger.debug(f"Overview cache SET: {key}")
    except Exception as e:
        logger.error(f"Error setting overview cache: {e}")


def invalidate_overview_cache(project_id: str) -> None:
    """
    Invalidate all cached overviews for a project.
    Called when project structure changes (regions, roles, team, subcontractors).

    Args:
        project_id: UUID of the project
    """
    try:
        with overview_cache_lock:
            keys_to_delete = [
                k for k in overview_cache.keys()
                if k.startswith(f"overview:{project_id}:")
            ]
            for key in keys_to_delete:
                overview_cache.pop(key, None)
            if keys_to_delete:
                logger.info(f"Invalidated {len(keys_to_delete)} overview cache entries for project {project_id}")
    except Exception as e:
        logger.error(f"Error invalidating overview cache: {e}")


def get_cache_stats() -> dict:
    """
    Get cache statistics for monitoring.

    Returns:
        Dict with current size, maxsize, and TTL for each cache
    """
    try:
        with dashboard_cache_lock, trends_cache_lock, overview_cache_lock:
            return {
                "dashboard_cache": {
                    "size": len(dashboard_cache),
                    "maxsize": dashboard_cache.maxsize,
                    "ttl": dashboard_cache.ttl
                },
                "trends_cache": {
                    "size": len(trends_cache),
                    "maxsize": trends_cache.maxsize,
                    "ttl": trends_cache.ttl
                },
                "overview_cache": {
                    "size": len(overview_cache),
                    "maxsize": overview_cache.maxsize,
                    "ttl": overview_cache.ttl
                }
            }
    except Exception as e:
        logger.error(f"Error getting cache stats: {e}")
        return {}
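

# Illustrative self-check (a minimal sketch, not part of the API the endpoints use).
# Running the module directly exercises one dashboard round trip with placeholder
# IDs; real callers pass project/user UUIDs.
if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)

    demo_project, demo_user = "demo-project", "demo-user"
    assert get_cached_dashboard(demo_project, demo_user) is None  # cold cache

    set_cached_dashboard(demo_project, demo_user, {"open_tickets": 3})
    assert get_cached_dashboard(demo_project, demo_user) == {"open_tickets": 3}

    invalidate_dashboard_cache(demo_project)
    assert get_cached_dashboard(demo_project, demo_user) is None  # invalidated

    print(get_cache_stats())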