teoat committed on
Commit
075b8c4
·
verified ·
1 Parent(s): b5a8a21

Upload core/cache/advanced_cache.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. core/cache/advanced_cache.py +306 -0
core/cache/advanced_cache.py ADDED
@@ -0,0 +1,306 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Redis-based Response Caching System
3
+ Implements intelligent caching for API responses and database queries
4
+ """
5
+
6
import hashlib
import inspect
import json
import logging
from functools import wraps
from typing import Any, Callable, Dict, Optional

from redis import Redis
from redis.exceptions import ConnectionError, TimeoutError
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
class CacheManager:
    """Redis-based caching manager for API responses and data.

    Values are stored as JSON (``default=str`` stringifies datetimes), so
    cached values must be JSON-serializable — note that tuples come back as
    lists after a round trip.  Redis connectivity failures are logged and
    degrade gracefully (treated as cache misses / no-ops) so callers never
    see cache errors.
    """

    def __init__(self, redis_client: "Redis", default_ttl: int = 300):
        """
        Args:
            redis_client: Connected Redis client used for all operations.
            default_ttl: Fallback TTL in seconds for :meth:`set`
                (300 = 5 minutes).
        """
        self.redis = redis_client
        self.default_ttl = default_ttl  # 5 minutes default

    def _generate_cache_key(self, prefix: str, *args, **kwargs) -> str:
        """Generate a consistent cache key from function arguments.

        kwargs are sorted so the same call produces the same key regardless
        of keyword order.  Result has the form ``cache:<md5 hex digest>``.
        """
        # Sort kwargs for consistent key generation.
        sorted_kwargs = sorted(kwargs.items())

        # Create a unique string from all arguments.
        key_components = [prefix] + [str(arg) for arg in args]
        key_components.extend([f"{k}:{v}" for k, v in sorted_kwargs])

        key_string = "|".join(key_components)
        # MD5 here is key derivation only, not a security boundary.
        return f"cache:{hashlib.md5(key_string.encode()).hexdigest()}"

    def get(self, key: str) -> Optional[Any]:
        """Return the deserialized cached value, or None on miss/error."""
        try:
            data = self.redis.get(key)
            # Explicit None check (was a truthiness test) so an empty
            # serialized payload could never be mistaken for a miss.
            if data is not None:
                return json.loads(data)
            return None
        except (ConnectionError, TimeoutError) as e:
            logger.warning(f"Cache get failed: {e}")
            return None

    def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        """Store *value* under *key* with a TTL; return True on success.

        TypeError (unserializable value) is also swallowed and logged.
        """
        try:
            ttl = ttl or self.default_ttl
            data = json.dumps(value, default=str)  # Handle datetime serialization
            return bool(self.redis.setex(key, ttl, data))
        except (ConnectionError, TimeoutError, TypeError) as e:
            logger.warning(f"Cache set failed: {e}")
            return False

    def delete(self, key: str) -> bool:
        """Delete *key*; return True if a key was actually removed."""
        try:
            return bool(self.redis.delete(key))
        except (ConnectionError, TimeoutError) as e:
            logger.warning(f"Cache delete failed: {e}")
            return False

    def delete_pattern(self, pattern: str) -> int:
        """Delete all keys matching *pattern*; return the number deleted.

        Uses SCAN (non-blocking, incremental) instead of the original
        blocking KEYS command, which can stall the Redis server on large
        keyspaces.
        """
        try:
            keys = list(self.redis.scan_iter(match=pattern))
            if keys:
                return self.redis.delete(*keys)
            return 0
        except (ConnectionError, TimeoutError) as e:
            logger.warning(f"Cache pattern delete failed: {e}")
            return 0

    def clear_user_cache(self, user_id: str) -> None:
        """Clear all cache entries for a specific user.

        These patterns match the explicit (non-hashed) keys built by
        APICacheManager's key helpers; keys from _generate_cache_key are
        opaque hashes and are not covered.
        """
        patterns = [
            f"cache:user:{user_id}:*",
            f"cache:cases:user:{user_id}:*",
            f"cache:activities:user:{user_id}:*",
        ]

        for pattern in patterns:
            self.delete_pattern(pattern)

    def clear_case_cache(self, case_id: str) -> None:
        """Clear all cache entries for a specific case."""
        patterns = [
            f"cache:case:{case_id}:*",
            f"cache:case:{case_id}",
            "cache:cases:list:*",  # Clear case listing caches
        ]

        for pattern in patterns:
            self.delete_pattern(pattern)

    def cached(self, ttl: Optional[int] = None, key_prefix: str = ""):
        """Decorator that caches a function's non-None results.

        Generalized to support both sync and async callables (the original
        wrapper was always a coroutine, so decorating a sync function broke
        it).  None results are never cached and are recomputed every call.

        Args:
            ttl: Per-decorator TTL override; falls back to default_ttl.
            key_prefix: Cache-key prefix; defaults to module.qualname.
        """
        def decorator(func: Callable) -> Callable:
            prefix = key_prefix or f"{func.__module__}.{func.__name__}"

            def _lookup(args, kwargs):
                # Returns (key, cached value or None).
                cache_key = self._generate_cache_key(prefix, *args, **kwargs)
                hit = self.get(cache_key)
                if hit is not None:
                    logger.debug(f"Cache hit for {cache_key}")
                return cache_key, hit

            def _store(cache_key, result):
                if result is not None:
                    self.set(cache_key, result, ttl)

            if inspect.iscoroutinefunction(func):
                @wraps(func)
                async def wrapper(*args, **kwargs):
                    cache_key, hit = _lookup(args, kwargs)
                    if hit is not None:
                        return hit
                    logger.debug(f"Cache miss for {cache_key}, executing function")
                    result = await func(*args, **kwargs)
                    _store(cache_key, result)
                    return result
            else:
                @wraps(func)
                def wrapper(*args, **kwargs):
                    cache_key, hit = _lookup(args, kwargs)
                    if hit is not None:
                        return hit
                    logger.debug(f"Cache miss for {cache_key}, executing function")
                    result = func(*args, **kwargs)
                    _store(cache_key, result)
                    return result

            return wrapper
        return decorator
126
+
127
+
128
class APICacheManager(CacheManager):
    """Cache manager specialised for API responses.

    Layers per-data-type TTLs and human-readable, pattern-friendly key
    builders for case and user endpoints on top of CacheManager.
    """

    def __init__(self, redis_client: Redis):
        super().__init__(redis_client, default_ttl=300)  # 5 minutes

        # TTL (seconds) per category of cached data.
        self.ttls = {
            'user_profile': 600,   # 10 minutes
            'case_list': 120,      # 2 minutes
            'case_detail': 300,    # 5 minutes
            'analytics': 1800,     # 30 minutes
            'stats': 60,           # 1 minute
            'public_data': 3600,   # 1 hour
        }

    def get_case_list_cache_key(self, filters: Dict[str, Any], page: int = 1, limit: int = 20) -> str:
        """Build the cache key for a filtered, paginated case listing."""
        # Only non-None filters participate; sorting keeps the key stable
        # across calls that pass the same filters in a different order.
        active = [
            f"{name}:{choice}"
            for name, choice in sorted(filters.items())
            if choice is not None
        ]
        filter_string = "|".join(active) if active else "all"
        return f"cache:cases:list:{filter_string}:page:{page}:limit:{limit}"

    def get_case_detail_cache_key(self, case_id: str) -> str:
        """Build the cache key for a single case's detail payload."""
        return f"cache:case:{case_id}:detail"

    def get_user_cases_cache_key(self, user_id: str, status: Optional[str] = None) -> str:
        """Build the cache key for a user's cases, optionally narrowed by status."""
        suffix = f":status:{status}" if status else ""
        return f"cache:user:{user_id}:cases{suffix}"

    def cache_case_list(self, filters: Dict[str, Any], page: int, limit: int, results: list) -> None:
        """Store case listing results under the listing's cache key."""
        self.set(
            self.get_case_list_cache_key(filters, page, limit),
            results,
            self.ttls['case_list'],
        )

    def get_cached_case_list(self, filters: Dict[str, Any], page: int = 1, limit: int = 20) -> Optional[list]:
        """Fetch previously cached case listing results, if any."""
        return self.get(self.get_case_list_cache_key(filters, page, limit))

    def cache_case_detail(self, case_id: str, case_data: Dict[str, Any]) -> None:
        """Store a case's detail payload."""
        self.set(
            self.get_case_detail_cache_key(case_id),
            case_data,
            self.ttls['case_detail'],
        )

    def get_cached_case_detail(self, case_id: str) -> Optional[Dict[str, Any]]:
        """Fetch a previously cached case detail payload, if any."""
        return self.get(self.get_case_detail_cache_key(case_id))

    def invalidate_case_caches(self, case_id: str) -> None:
        """Drop every cache entry related to *case_id*."""
        self.clear_case_cache(case_id)
        # Listing pages may embed this case, so they are stale as well.
        self.delete_pattern("cache:cases:list:*")

    def invalidate_user_caches(self, user_id: str) -> None:
        """Drop every cache entry related to *user_id*."""
        self.clear_user_cache(user_id)

    def warmup_popular_caches(self) -> None:
        """Pre-populate frequently accessed caches (placeholder).

        Intended to run periodically; the actual population logic is not
        implemented yet.
        """
        logger.info("Warming up popular caches...")

    def get_cache_stats(self) -> Dict[str, Any]:
        """Return a snapshot of Redis health/usage statistics.

        On any Redis error a single-key ``{'error': ...}`` dict is returned
        instead of raising.
        """
        try:
            server_info = self.redis.info()
            return {
                'connected_clients': server_info.get('connected_clients', 0),
                'used_memory': server_info.get('used_memory_human', '0B'),
                'total_keys': self.redis.dbsize(),
                'hit_rate': 'N/A',  # Would need additional tracking
                'evictions': server_info.get('evicted_keys', 0),
            }
        except Exception as exc:
            logger.warning(f"Could not get cache stats: {exc}")
            return {'error': str(exc)}
219
+
220
+
221
class DatabaseQueryCache(CacheManager):
    """Cache manager specifically for database query results.

    Keys are built as ``cache:db:<query_name>:<md5>`` so the query name stays
    visible in the key.  This fixes a bug in the original implementation:
    keys were the fully-hashed ``cache:<md5>`` form produced by
    ``_generate_cache_key``, so the ``cache:db:<query_name>:*`` pattern used
    by :meth:`invalidate_all_query_caches` could never match any of them.
    """

    def __init__(self, redis_client: Redis):
        """
        Args:
            redis_client: Connected Redis client used for all operations.
        """
        super().__init__(redis_client, default_ttl=600)  # 10 minutes for DB queries

    def _query_cache_key(self, query_name: str, *args, **kwargs) -> str:
        """Build a key that embeds *query_name* for pattern-based deletion."""
        # _generate_cache_key returns "cache:<md5>"; keep the digest but
        # prepend the visible query-name namespace so patterns can match.
        hashed = self._generate_cache_key(f"db:{query_name}", *args, **kwargs)
        digest = hashed.split(":", 1)[1]
        return f"cache:db:{query_name}:{digest}"

    def cached_query(self, query_name: str, ttl: Optional[int] = None):
        """Decorator for caching database query results.

        Generalized to support both sync and async query functions (the
        original wrapper was always a coroutine).  None results are not
        cached and are re-executed on the next call.
        """
        def decorator(func: Callable) -> Callable:
            def _lookup(args, kwargs):
                # Returns (key, cached value or None).
                cache_key = self._query_cache_key(query_name, *args, **kwargs)
                hit = self.get(cache_key)
                if hit is not None:
                    logger.debug(f"DB cache hit for {query_name}")
                return cache_key, hit

            if inspect.iscoroutinefunction(func):
                @wraps(func)
                async def wrapper(*args, **kwargs):
                    cache_key, hit = _lookup(args, kwargs)
                    if hit is not None:
                        return hit
                    logger.debug(f"DB cache miss for {query_name}, executing query")
                    result = await func(*args, **kwargs)
                    if result is not None:
                        self.set(cache_key, result, ttl)
                    return result
            else:
                @wraps(func)
                def wrapper(*args, **kwargs):
                    cache_key, hit = _lookup(args, kwargs)
                    if hit is not None:
                        return hit
                    logger.debug(f"DB cache miss for {query_name}, executing query")
                    result = func(*args, **kwargs)
                    if result is not None:
                        self.set(cache_key, result, ttl)
                    return result

            return wrapper
        return decorator

    def invalidate_query_cache(self, query_name: str, *args, **kwargs) -> None:
        """Invalidate the cache entry for one specific query invocation."""
        self.delete(self._query_cache_key(query_name, *args, **kwargs))

    def invalidate_all_query_caches(self, query_name: str) -> None:
        """Invalidate every cached result for *query_name*."""
        # Matches the cache:db:<name>:<digest> keys built by _query_cache_key.
        pattern = f"cache:db:{query_name}:*"
        self.delete_pattern(pattern)
263
+
264
+
265
# Global instances (would be initialized in app startup).
# All three stay None until init_cache_managers() succeeds; callers must
# handle the None case (i.e. run with caching disabled).
cache_manager: Optional[CacheManager] = None
api_cache_manager: Optional[APICacheManager] = None
db_query_cache: Optional[DatabaseQueryCache] = None
269
+
270
+
271
def init_cache_managers(redis_url: str = "redis://localhost:6379/0") -> None:
    """Create the module-level cache managers backed by a shared Redis client.

    On any failure (unparseable URL, server unreachable) the globals are
    reset to None and the application continues without caching.

    Args:
        redis_url: Redis connection URL; defaults to local DB 0.
    """
    global cache_manager, api_cache_manager, db_query_cache

    try:
        client = Redis.from_url(redis_url, decode_responses=True)
        client.ping()  # Fail fast if the server is unreachable.

        cache_manager = CacheManager(client)
        api_cache_manager = APICacheManager(client)
        db_query_cache = DatabaseQueryCache(client)

        logger.info("Cache managers initialized successfully")

    except Exception as e:
        logger.warning(f"Failed to initialize cache managers: {e}")
        # Degrade gracefully: run without caching rather than crash startup.
        cache_manager = api_cache_manager = db_query_cache = None
292
+
293
+
294
def get_cache_manager() -> Optional[CacheManager]:
    """Get the global cache manager instance.

    Returns None when init_cache_managers() has not run (or failed); callers
    should then proceed without caching.
    """
    return cache_manager
297
+
298
+
299
def get_api_cache_manager() -> Optional[APICacheManager]:
    """Get the global API cache manager instance.

    Returns None when init_cache_managers() has not run (or failed); callers
    should then proceed without caching.
    """
    return api_cache_manager
302
+
303
+
304
def get_db_query_cache() -> Optional[DatabaseQueryCache]:
    """Get the global database query cache instance.

    Returns None when init_cache_managers() has not run (or failed); callers
    should then proceed without caching.
    """
    return db_query_cache