firepenguindisopanda
committed on
Commit
·
f8aef8d
1
Parent(s):
a7be63b
Add Upstash Redis caching support and update dependencies
Browse files- .env.example +4 -0
- app/core/cache.py +469 -0
- pyproject.toml +1 -0
- requirements.txt +106 -8
- uv.lock +14 -0
.env.example
CHANGED
|
@@ -50,3 +50,7 @@ CIRCUIT_BREAKER_RECOVERY_TIMEOUT=30
|
|
| 50 |
# Cost Control
|
| 51 |
MAX_TOKENS_PER_REQUEST=50000
|
| 52 |
MONTHLY_TOKEN_BUDGET=10000000
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 50 |
# Cost Control
|
| 51 |
MAX_TOKENS_PER_REQUEST=50000
|
| 52 |
MONTHLY_TOKEN_BUDGET=10000000
|
| 53 |
+
|
| 54 |
+
# Upstash Redis Configuration (for caching observability metrics)
|
| 55 |
+
UPSTASH_REDIS_REST_URL=https://your-redis-instance.upstash.io
|
| 56 |
+
UPSTASH_REDIS_REST_TOKEN=your_upstash_redis_token_here
|
app/core/cache.py
ADDED
|
@@ -0,0 +1,469 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Upstash Redis Cache Service for Observability Dashboard.
|
| 3 |
+
|
| 4 |
+
Provides caching layer for LangSmith metrics with TTL support.
|
| 5 |
+
Uses HTTP-based Redis client optimized for serverless environments.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import os
|
| 9 |
+
import json
|
| 10 |
+
import logging
|
| 11 |
+
from typing import Optional, Any, Dict, Union
|
| 12 |
+
from datetime import datetime, timezone
|
| 13 |
+
from functools import wraps
|
| 14 |
+
from dataclasses import dataclass, asdict
|
| 15 |
+
|
| 16 |
+
from dotenv import load_dotenv
|
| 17 |
+
|
| 18 |
+
load_dotenv()
|
| 19 |
+
|
| 20 |
+
logger = logging.getLogger(__name__)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
@dataclass
class CacheConfig:
    """Cache configuration with sensible defaults.

    All TTLs are in seconds. Shorter TTLs are used for data that changes
    frequently (run details); longer ones for slower-moving aggregates
    (trends). `prefix` namespaces every key written by UpstashRedisCache
    so multiple apps can safely share one Redis instance.
    """
    # TTL values in seconds
    summary_ttl: int = 300  # 5 minutes for summary stats
    trends_ttl: int = 600  # 10 minutes for trend data
    projects_ttl: int = 300  # 5 minutes for project list
    runs_ttl: int = 120  # 2 minutes for run details
    default_ttl: int = 300  # 5 minutes default

    # Cache key prefixes
    prefix: str = "ideasprinter:observability"
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class UpstashRedisCache:
    """
    Async-compatible Redis cache using Upstash REST API.

    Designed for serverless environments with HTTP-based connections.
    Supports both sync and async operations.

    All operations are fail-soft: when credentials are missing or the
    backend errors, reads return None and writes return False instead of
    raising, so callers never break because the cache is down.
    """

    def __init__(self, config: Optional["CacheConfig"] = None):
        # NOTE: "CacheConfig" is a string annotation so the class can be
        # defined/imported even before CacheConfig is in scope.
        self.config = config or CacheConfig()
        # Clients are created lazily on first use (see _get_*_client).
        self._sync_client = None
        self._async_client = None
        self._initialized = False  # reserved; not currently consulted

        # Load credentials from environment
        self.url = os.getenv("UPSTASH_REDIS_REST_URL")
        self.token = os.getenv("UPSTASH_REDIS_REST_TOKEN")

        if not self.url or not self.token:
            logger.warning(
                "Upstash Redis credentials not found. "
                "Set UPSTASH_REDIS_REST_URL and UPSTASH_REDIS_REST_TOKEN. "
                "Cache operations will be no-ops."
            )

    @property
    def is_configured(self) -> bool:
        """Check if Redis is properly configured."""
        return bool(self.url and self.token)

    def _get_sync_client(self):
        """Get or create synchronous Redis client (None when unavailable)."""
        if not self.is_configured:
            return None

        if self._sync_client is None:
            try:
                # Imported lazily so the module loads without upstash-redis.
                from upstash_redis import Redis
                self._sync_client = Redis(url=self.url, token=self.token)
                logger.info("Upstash Redis sync client initialized")
            except Exception as e:
                logger.error(f"Failed to initialize Upstash Redis sync client: {e}")
                return None
        return self._sync_client

    def _get_async_client(self):
        """Get or create asynchronous Redis client (None when unavailable)."""
        if not self.is_configured:
            return None

        if self._async_client is None:
            try:
                from upstash_redis.asyncio import Redis
                self._async_client = Redis(url=self.url, token=self.token)
                logger.info("Upstash Redis async client initialized")
            except Exception as e:
                logger.error(f"Failed to initialize Upstash Redis async client: {e}")
                return None
        return self._async_client

    def _make_key(self, key: str) -> str:
        """Generate full cache key with prefix."""
        return f"{self.config.prefix}:{key}"

    def _serialize(self, value: Any) -> str:
        """Serialize value to JSON string.

        Non-container values are wrapped as {"value": ...} so that
        _deserialize can round-trip them unambiguously.
        """
        if isinstance(value, (dict, list)):
            return json.dumps(value, default=str)
        return json.dumps({"value": value}, default=str)

    def _deserialize(self, value: Optional[str]) -> Any:
        """Deserialize JSON string to Python object.

        Unwraps the {"value": ...} envelope produced by _serialize; returns
        the raw input unchanged when it is not valid JSON.
        """
        if value is None:
            return None
        try:
            data = json.loads(value)
            # Unwrap simple values
            if isinstance(data, dict) and "value" in data and len(data) == 1:
                return data["value"]
            return data
        except (json.JSONDecodeError, TypeError):
            return value

    # ==================== Synchronous Operations ====================

    def get(self, key: str) -> Optional[Any]:
        """Get value from cache (sync). Returns None on miss or error."""
        client = self._get_sync_client()
        if not client:
            return None

        try:
            full_key = self._make_key(key)
            value = client.get(full_key)
            # Explicit None check: an empty-string payload is a valid hit.
            if value is not None:
                logger.debug(f"Cache HIT: {key}")
                return self._deserialize(value)
            logger.debug(f"Cache MISS: {key}")
            return None
        except Exception as e:
            logger.warning(f"Cache get error for {key}: {e}")
            return None

    def set(
        self,
        key: str,
        value: Any,
        ttl: Optional[int] = None
    ) -> bool:
        """Set value in cache with TTL (sync).

        Args:
            key: Unprefixed cache key.
            value: JSON-serializable value (others are stringified).
            ttl: Time-to-live in seconds; falls back to config.default_ttl.

        Returns:
            True if the backend acknowledged the write.
        """
        client = self._get_sync_client()
        if not client:
            return False

        try:
            full_key = self._make_key(key)
            serialized = self._serialize(value)
            ex = ttl or self.config.default_ttl

            result = client.set(full_key, serialized, ex=ex)
            logger.debug(f"Cache SET: {key} (TTL: {ex}s)")
            return bool(result)
        except Exception as e:
            logger.warning(f"Cache set error for {key}: {e}")
            return False

    def delete(self, key: str) -> bool:
        """Delete key from cache (sync). Returns True if a key was removed."""
        client = self._get_sync_client()
        if not client:
            return False

        try:
            full_key = self._make_key(key)
            result = client.delete(full_key)
            logger.debug(f"Cache DELETE: {key}")
            return bool(result)
        except Exception as e:
            logger.warning(f"Cache delete error for {key}: {e}")
            return False

    def exists(self, key: str) -> bool:
        """Check if key exists in cache (sync)."""
        client = self._get_sync_client()
        if not client:
            return False

        try:
            full_key = self._make_key(key)
            return bool(client.exists(full_key))
        except Exception as e:
            logger.warning(f"Cache exists error for {key}: {e}")
            return False

    # ==================== Asynchronous Operations ====================

    async def aget(self, key: str) -> Optional[Any]:
        """Get value from cache (async). Returns None on miss or error."""
        client = self._get_async_client()
        if not client:
            return None

        try:
            full_key = self._make_key(key)
            value = await client.get(full_key)
            # Explicit None check: an empty-string payload is a valid hit.
            if value is not None:
                logger.debug(f"Cache HIT: {key}")
                return self._deserialize(value)
            logger.debug(f"Cache MISS: {key}")
            return None
        except Exception as e:
            logger.warning(f"Async cache get error for {key}: {e}")
            return None

    async def aset(
        self,
        key: str,
        value: Any,
        ttl: Optional[int] = None
    ) -> bool:
        """Set value in cache with TTL (async). See set() for semantics."""
        client = self._get_async_client()
        if not client:
            return False

        try:
            full_key = self._make_key(key)
            serialized = self._serialize(value)
            ex = ttl or self.config.default_ttl

            result = await client.set(full_key, serialized, ex=ex)
            logger.debug(f"Async cache SET: {key} (TTL: {ex}s)")
            return bool(result)
        except Exception as e:
            logger.warning(f"Async cache set error for {key}: {e}")
            return False

    async def adelete(self, key: str) -> bool:
        """Delete key from cache (async)."""
        client = self._get_async_client()
        if not client:
            return False

        try:
            full_key = self._make_key(key)
            result = await client.delete(full_key)
            logger.debug(f"Async cache DELETE: {key}")
            return bool(result)
        except Exception as e:
            logger.warning(f"Async cache delete error for {key}: {e}")
            return False

    async def aexists(self, key: str) -> bool:
        """Check if key exists in cache (async)."""
        client = self._get_async_client()
        if not client:
            return False

        try:
            full_key = self._make_key(key)
            return bool(await client.exists(full_key))
        except Exception as e:
            logger.warning(f"Async cache exists error for {key}: {e}")
            return False

    # ==================== Observability-Specific Methods ====================

    def get_summary_key(self, user_id: str, start_date: str, end_date: str, project_id: Optional[str] = None) -> str:
        """Generate cache key for summary stats."""
        parts = ["summary", user_id, start_date, end_date]
        if project_id:
            parts.append(project_id)
        return ":".join(parts)

    def get_trends_key(self, user_id: str, period: str, project_id: Optional[str] = None) -> str:
        """Generate cache key for trends data."""
        parts = ["trends", user_id, period]
        if project_id:
            parts.append(project_id)
        return ":".join(parts)

    def get_projects_key(self, user_id: str) -> str:
        """Generate cache key for projects list."""
        return f"projects:{user_id}"

    def get_runs_key(self, user_id: str, project_id: str, page: int = 1) -> str:
        """Generate cache key for runs list."""
        return f"runs:{user_id}:{project_id}:page{page}"

    async def cache_summary(
        self,
        user_id: str,
        start_date: str,
        end_date: str,
        data: Dict[str, Any],
        project_id: Optional[str] = None
    ) -> bool:
        """Cache summary statistics."""
        key = self.get_summary_key(user_id, start_date, end_date, project_id)
        return await self.aset(key, data, ttl=self.config.summary_ttl)

    async def get_cached_summary(
        self,
        user_id: str,
        start_date: str,
        end_date: str,
        project_id: Optional[str] = None
    ) -> Optional[Dict[str, Any]]:
        """Get cached summary statistics."""
        key = self.get_summary_key(user_id, start_date, end_date, project_id)
        return await self.aget(key)

    async def cache_trends(
        self,
        user_id: str,
        period: str,
        data: Dict[str, Any],
        project_id: Optional[str] = None
    ) -> bool:
        """Cache trends data."""
        key = self.get_trends_key(user_id, period, project_id)
        return await self.aset(key, data, ttl=self.config.trends_ttl)

    async def get_cached_trends(
        self,
        user_id: str,
        period: str,
        project_id: Optional[str] = None
    ) -> Optional[Dict[str, Any]]:
        """Get cached trends data."""
        key = self.get_trends_key(user_id, period, project_id)
        return await self.aget(key)

    async def cache_projects(self, user_id: str, data: Dict[str, Any]) -> bool:
        """Cache projects list."""
        key = self.get_projects_key(user_id)
        return await self.aset(key, data, ttl=self.config.projects_ttl)

    async def get_cached_projects(self, user_id: str) -> Optional[Dict[str, Any]]:
        """Get cached projects list."""
        key = self.get_projects_key(user_id)
        return await self.aget(key)

    async def invalidate_user_cache(self, user_id: str) -> None:
        """Invalidate cache entries for a user.

        The Upstash REST API does not expose SCAN-style wildcard deletion,
        so only exactly-known keys can be deleted here; wildcard-keyed
        entries (summary/trends/runs, which embed dates/periods/pages)
        expire naturally via their TTLs.
        """
        logger.info(f"Cache invalidation requested for user {user_id}")
        # The projects key is deterministic, so delete it directly.
        await self.adelete(self.get_projects_key(user_id))
|
| 353 |
+
|
| 354 |
+
|
| 355 |
+
# ==================== Caching Decorator ====================
|
| 356 |
+
|
| 357 |
+
def cached(
    key_func: callable,
    ttl: Optional[int] = None,
    cache_instance: Optional["UpstashRedisCache"] = None
):
    """
    Decorator for caching async function results.

    Fail-soft: if the key cannot be built, the wrapped function runs
    uncached. None results are never cached (so failures can be retried),
    but falsy results like 0 or "" are.

    Args:
        key_func: Function that generates cache key from function arguments
        ttl: Time-to-live in seconds (uses default if None)
        cache_instance: Cache instance to use (creates default if None)

    Example:
        @cached(
            key_func=lambda user_id, period: f"trends:{user_id}:{period}",
            ttl=600
        )
        async def get_trends(user_id: str, period: str):
            # expensive operation
            return await fetch_from_langsmith(...)
    """
    # NOTE: "UpstashRedisCache" is a string annotation so this decorator is
    # importable regardless of definition order.
    def decorator(func):
        @wraps(func)
        async def wrapper(*args, **kwargs):
            cache = cache_instance or get_cache()

            # Generate cache key
            try:
                cache_key = key_func(*args, **kwargs)
            except Exception as e:
                logger.warning(f"Failed to generate cache key: {e}")
                return await func(*args, **kwargs)

            # Try to get from cache
            cached_value = await cache.aget(cache_key)
            if cached_value is not None:
                return cached_value

            # Execute function and cache result
            result = await func(*args, **kwargs)

            if result is not None:
                await cache.aset(cache_key, result, ttl=ttl)

            return result
        return wrapper
    return decorator
|
| 405 |
+
|
| 406 |
+
|
| 407 |
+
# ==================== Global Cache Instance ====================
|
| 408 |
+
|
| 409 |
+
# Process-wide singleton; created lazily by get_cache().
_cache_instance: Optional[UpstashRedisCache] = None


def get_cache() -> UpstashRedisCache:
    """Return the global cache instance, creating it on first use."""
    global _cache_instance
    instance = _cache_instance
    if instance is None:
        instance = UpstashRedisCache()
        _cache_instance = instance
    return instance
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
def init_cache(config: Optional[CacheConfig] = None) -> UpstashRedisCache:
    """Replace the global cache instance with one built from *config*."""
    global _cache_instance
    instance = UpstashRedisCache(config)
    _cache_instance = instance
    return instance
|
| 425 |
+
|
| 426 |
+
|
| 427 |
+
# ==================== Health Check ====================
|
| 428 |
+
|
| 429 |
+
async def check_cache_health() -> Dict[str, Any]:
    """
    Check Redis cache health and connectivity.

    Performs a round-trip set/get/delete against a short-lived probe key
    and measures its latency.

    Returns:
        Dict with status ("healthy" | "degraded" | "unhealthy" |
        "not_configured"), latency_ms, and configuration info.
    """
    cache = get_cache()
    result = {
        "service": "upstash_redis",
        "configured": cache.is_configured,
        "status": "unknown",
        "latency_ms": None,
        "error": None
    }

    if not cache.is_configured:
        result["status"] = "not_configured"
        result["error"] = "Missing UPSTASH_REDIS_REST_URL or UPSTASH_REDIS_REST_TOKEN"
        return result

    try:
        import time
        # perf_counter is monotonic; time.time() can jump (e.g. NTP sync)
        # and produce negative or wildly wrong latency readings.
        start = time.perf_counter()

        # Ping test with a simple set/get
        test_key = "_health_check"
        await cache.aset(test_key, {"timestamp": datetime.now(timezone.utc).isoformat()}, ttl=10)
        value = await cache.aget(test_key)
        await cache.adelete(test_key)

        latency = (time.perf_counter() - start) * 1000

        result["status"] = "healthy" if value else "degraded"
        result["latency_ms"] = round(latency, 2)

    except Exception as e:
        result["status"] = "unhealthy"
        result["error"] = str(e)

    return result
|
pyproject.toml
CHANGED
|
@@ -34,6 +34,7 @@ dependencies = [
|
|
| 34 |
"lightning>=2.6.0",
|
| 35 |
"fiddle>=0.3.0",
|
| 36 |
"cloudpickle>=3.1.2",
|
|
|
|
| 37 |
]
|
| 38 |
|
| 39 |
[project.optional-dependencies]
|
|
|
|
| 34 |
"lightning>=2.6.0",
|
| 35 |
"fiddle>=0.3.0",
|
| 36 |
"cloudpickle>=3.1.2",
|
| 37 |
+
"upstash-redis>=1.5.0",
|
| 38 |
]
|
| 39 |
|
| 40 |
[project.optional-dependencies]
|
requirements.txt
CHANGED
|
@@ -1,15 +1,23 @@
|
|
| 1 |
# This file was autogenerated by uv via the following command:
|
| 2 |
# uv pip compile pyproject.toml -o requirements.txt --python-version 3.12
|
|
|
|
|
|
|
| 3 |
aiohappyeyeballs==2.6.1
|
| 4 |
# via aiohttp
|
| 5 |
aiohttp==3.13.2
|
| 6 |
-
# via
|
|
|
|
|
|
|
| 7 |
aiosignal==1.4.0
|
| 8 |
# via aiohttp
|
| 9 |
annotated-doc==0.0.4
|
| 10 |
# via fastapi
|
| 11 |
annotated-types==0.7.0
|
| 12 |
# via pydantic
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13 |
anyio==4.12.0
|
| 14 |
# via
|
| 15 |
# httpx
|
|
@@ -24,6 +32,7 @@ certifi==2025.11.12
|
|
| 24 |
# via
|
| 25 |
# httpcore
|
| 26 |
# httpx
|
|
|
|
| 27 |
# requests
|
| 28 |
cffi==2.0.0
|
| 29 |
# via cryptography
|
|
@@ -31,17 +40,24 @@ charset-normalizer==3.4.4
|
|
| 31 |
# via requests
|
| 32 |
click==8.3.1
|
| 33 |
# via uvicorn
|
|
|
|
|
|
|
| 34 |
colorama==0.4.6
|
| 35 |
-
# via
|
|
|
|
|
|
|
| 36 |
cryptography==46.0.3
|
| 37 |
# via python-jose
|
| 38 |
defusedxml==0.7.1
|
| 39 |
# via fpdf2
|
| 40 |
ecdsa==0.19.1
|
| 41 |
# via python-jose
|
| 42 |
-
# via ideasprinter-api (pyproject.toml)
|
| 43 |
fastapi==0.123.5
|
| 44 |
# via ideasprinter-api (pyproject.toml)
|
|
|
|
|
|
|
|
|
|
|
|
|
| 45 |
filetype==1.2.0
|
| 46 |
# via langchain-nvidia-ai-endpoints
|
| 47 |
fonttools==4.61.0
|
|
@@ -52,6 +68,11 @@ frozenlist==1.8.0
|
|
| 52 |
# via
|
| 53 |
# aiohttp
|
| 54 |
# aiosignal
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 55 |
google-api-core==2.28.1
|
| 56 |
# via google-api-python-client
|
| 57 |
google-api-python-client==2.187.0
|
|
@@ -68,6 +89,8 @@ google-auth-oauthlib==1.2.3
|
|
| 68 |
# via ideasprinter-api (pyproject.toml)
|
| 69 |
googleapis-common-protos==1.72.0
|
| 70 |
# via google-api-core
|
|
|
|
|
|
|
| 71 |
greenlet==3.3.0
|
| 72 |
# via sqlalchemy
|
| 73 |
h11==0.16.0
|
|
@@ -84,6 +107,9 @@ httpx==0.28.1
|
|
| 84 |
# via
|
| 85 |
# langgraph-sdk
|
| 86 |
# langsmith
|
|
|
|
|
|
|
|
|
|
| 87 |
idna==3.11
|
| 88 |
# via
|
| 89 |
# anyio
|
|
@@ -91,7 +117,9 @@ idna==3.11
|
|
| 91 |
# requests
|
| 92 |
# yarl
|
| 93 |
jinja2==3.1.6
|
| 94 |
-
# via
|
|
|
|
|
|
|
| 95 |
jsonpatch==1.33
|
| 96 |
# via langchain-core
|
| 97 |
jsonpointer==3.0.0
|
|
@@ -122,31 +150,60 @@ langsmith==0.4.53
|
|
| 122 |
# via
|
| 123 |
# ideasprinter-api (pyproject.toml)
|
| 124 |
# langchain-core
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 125 |
markupsafe==3.0.3
|
| 126 |
# via jinja2
|
|
|
|
|
|
|
| 127 |
multidict==6.7.0
|
| 128 |
# via
|
| 129 |
# aiohttp
|
| 130 |
# yarl
|
|
|
|
|
|
|
| 131 |
numpy==2.3.5
|
| 132 |
# via
|
| 133 |
# ideasprinter-api (pyproject.toml)
|
|
|
|
| 134 |
oauthlib==3.3.1
|
| 135 |
# via requests-oauthlib
|
|
|
|
|
|
|
| 136 |
orjson==3.11.4
|
| 137 |
# via
|
| 138 |
# langgraph-sdk
|
| 139 |
# langsmith
|
|
|
|
| 140 |
ormsgpack==1.12.0
|
| 141 |
# via langgraph-checkpoint
|
| 142 |
-
packaging==
|
| 143 |
# via
|
|
|
|
| 144 |
# langchain-core
|
| 145 |
# langsmith
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 146 |
passlib==1.7.4
|
| 147 |
# via ideasprinter-api (pyproject.toml)
|
| 148 |
pillow==12.0.0
|
| 149 |
# via fpdf2
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 150 |
propcache==0.4.1
|
| 151 |
# via
|
| 152 |
# aiohttp
|
|
@@ -180,18 +237,28 @@ pydantic-core==2.41.5
|
|
| 180 |
# via pydantic
|
| 181 |
pyparsing==3.2.5
|
| 182 |
# via httplib2
|
|
|
|
|
|
|
| 183 |
python-dotenv==1.2.1
|
| 184 |
# via ideasprinter-api (pyproject.toml)
|
| 185 |
python-jose==3.5.0
|
| 186 |
# via ideasprinter-api (pyproject.toml)
|
| 187 |
python-multipart==0.0.20
|
| 188 |
# via ideasprinter-api (pyproject.toml)
|
|
|
|
|
|
|
| 189 |
pyyaml==6.0.3
|
| 190 |
-
# via
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 191 |
requests==2.32.5
|
| 192 |
# via
|
| 193 |
# google-api-core
|
| 194 |
# langsmith
|
|
|
|
| 195 |
# requests-oauthlib
|
| 196 |
# requests-toolbelt
|
| 197 |
requests-oauthlib==2.0.0
|
|
@@ -202,31 +269,62 @@ rsa==4.9.1
|
|
| 202 |
# via
|
| 203 |
# google-auth
|
| 204 |
# python-jose
|
|
|
|
|
|
|
|
|
|
|
|
|
| 205 |
six==1.17.0
|
| 206 |
-
# via
|
|
|
|
|
|
|
| 207 |
sqlalchemy==2.0.45
|
| 208 |
# via ideasprinter-api (pyproject.toml)
|
| 209 |
starlette==0.50.0
|
| 210 |
# via fastapi
|
|
|
|
|
|
|
| 211 |
tenacity==9.1.2
|
| 212 |
# via langchain-core
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 213 |
typing-extensions==4.15.0
|
| 214 |
# via
|
| 215 |
# aiosignal
|
| 216 |
# anyio
|
| 217 |
# fastapi
|
|
|
|
| 218 |
# langchain-core
|
|
|
|
|
|
|
|
|
|
| 219 |
# pydantic
|
| 220 |
# pydantic-core
|
|
|
|
| 221 |
# sqlalchemy
|
| 222 |
# starlette
|
|
|
|
| 223 |
# typing-inspection
|
| 224 |
typing-inspection==0.4.2
|
| 225 |
# via pydantic
|
|
|
|
|
|
|
| 226 |
uritemplate==4.2.0
|
| 227 |
# via google-api-python-client
|
| 228 |
urllib3==2.5.0
|
| 229 |
-
# via
|
|
|
|
|
|
|
| 230 |
uuid-utils==0.12.0
|
| 231 |
# via langsmith
|
| 232 |
uvicorn==0.38.0
|
|
|
|
| 1 |
# This file was autogenerated by uv via the following command:
|
| 2 |
# uv pip compile pyproject.toml -o requirements.txt --python-version 3.12
|
| 3 |
+
absl-py==2.3.1
|
| 4 |
+
# via fiddle
|
| 5 |
aiohappyeyeballs==2.6.1
|
| 6 |
# via aiohttp
|
| 7 |
aiohttp==3.13.2
|
| 8 |
+
# via
|
| 9 |
+
# fsspec
|
| 10 |
+
# langchain-nvidia-ai-endpoints
|
| 11 |
aiosignal==1.4.0
|
| 12 |
# via aiohttp
|
| 13 |
annotated-doc==0.0.4
|
| 14 |
# via fastapi
|
| 15 |
annotated-types==0.7.0
|
| 16 |
# via pydantic
|
| 17 |
+
antlr4-python3-runtime==4.9.3
|
| 18 |
+
# via
|
| 19 |
+
# hydra-core
|
| 20 |
+
# omegaconf
|
| 21 |
anyio==4.12.0
|
| 22 |
# via
|
| 23 |
# httpx
|
|
|
|
| 32 |
# via
|
| 33 |
# httpcore
|
| 34 |
# httpx
|
| 35 |
+
# pinecone
|
| 36 |
# requests
|
| 37 |
cffi==2.0.0
|
| 38 |
# via cryptography
|
|
|
|
| 40 |
# via requests
|
| 41 |
click==8.3.1
|
| 42 |
# via uvicorn
|
| 43 |
+
cloudpickle==3.1.2
|
| 44 |
+
# via ideasprinter-api (pyproject.toml)
|
| 45 |
colorama==0.4.6
|
| 46 |
+
# via
|
| 47 |
+
# click
|
| 48 |
+
# tqdm
|
| 49 |
cryptography==46.0.3
|
| 50 |
# via python-jose
|
| 51 |
defusedxml==0.7.1
|
| 52 |
# via fpdf2
|
| 53 |
ecdsa==0.19.1
|
| 54 |
# via python-jose
|
|
|
|
| 55 |
fastapi==0.123.5
|
| 56 |
# via ideasprinter-api (pyproject.toml)
|
| 57 |
+
fiddle==0.3.0
|
| 58 |
+
# via ideasprinter-api (pyproject.toml)
|
| 59 |
+
filelock==3.20.2
|
| 60 |
+
# via torch
|
| 61 |
filetype==1.2.0
|
| 62 |
# via langchain-nvidia-ai-endpoints
|
| 63 |
fonttools==4.61.0
|
|
|
|
| 68 |
# via
|
| 69 |
# aiohttp
|
| 70 |
# aiosignal
|
| 71 |
+
fsspec==2025.12.0
|
| 72 |
+
# via
|
| 73 |
+
# lightning
|
| 74 |
+
# pytorch-lightning
|
| 75 |
+
# torch
|
| 76 |
google-api-core==2.28.1
|
| 77 |
# via google-api-python-client
|
| 78 |
google-api-python-client==2.187.0
|
|
|
|
| 89 |
# via ideasprinter-api (pyproject.toml)
|
| 90 |
googleapis-common-protos==1.72.0
|
| 91 |
# via google-api-core
|
| 92 |
+
graphviz==0.21
|
| 93 |
+
# via fiddle
|
| 94 |
greenlet==3.3.0
|
| 95 |
# via sqlalchemy
|
| 96 |
h11==0.16.0
|
|
|
|
| 107 |
# via
|
| 108 |
# langgraph-sdk
|
| 109 |
# langsmith
|
| 110 |
+
# upstash-redis
|
| 111 |
+
hydra-core==1.3.2
|
| 112 |
+
# via ideasprinter-api (pyproject.toml)
|
| 113 |
idna==3.11
|
| 114 |
# via
|
| 115 |
# anyio
|
|
|
|
| 117 |
# requests
|
| 118 |
# yarl
|
| 119 |
jinja2==3.1.6
|
| 120 |
+
# via
|
| 121 |
+
# ideasprinter-api (pyproject.toml)
|
| 122 |
+
# torch
|
| 123 |
jsonpatch==1.33
|
| 124 |
# via langchain-core
|
| 125 |
jsonpointer==3.0.0
|
|
|
|
| 150 |
# via
|
| 151 |
# ideasprinter-api (pyproject.toml)
|
| 152 |
# langchain-core
|
| 153 |
+
libcst==1.8.6
|
| 154 |
+
# via fiddle
|
| 155 |
+
lightning==2.6.0
|
| 156 |
+
# via ideasprinter-api (pyproject.toml)
|
| 157 |
+
lightning-utilities==0.15.2
|
| 158 |
+
# via
|
| 159 |
+
# lightning
|
| 160 |
+
# pytorch-lightning
|
| 161 |
+
# torchmetrics
|
| 162 |
markupsafe==3.0.3
|
| 163 |
# via jinja2
|
| 164 |
+
mpmath==1.3.0
|
| 165 |
+
# via sympy
|
| 166 |
multidict==6.7.0
|
| 167 |
# via
|
| 168 |
# aiohttp
|
| 169 |
# yarl
|
| 170 |
+
networkx==3.6.1
|
| 171 |
+
# via torch
|
| 172 |
numpy==2.3.5
|
| 173 |
# via
|
| 174 |
# ideasprinter-api (pyproject.toml)
|
| 175 |
+
# torchmetrics
|
| 176 |
oauthlib==3.3.1
|
| 177 |
# via requests-oauthlib
|
| 178 |
+
omegaconf==2.3.0
|
| 179 |
+
# via hydra-core
|
| 180 |
orjson==3.11.4
|
| 181 |
# via
|
| 182 |
# langgraph-sdk
|
| 183 |
# langsmith
|
| 184 |
+
# pinecone
|
| 185 |
ormsgpack==1.12.0
|
| 186 |
# via langgraph-checkpoint
|
| 187 |
+
packaging==24.2
|
| 188 |
# via
|
| 189 |
+
# hydra-core
|
| 190 |
# langchain-core
|
| 191 |
# langsmith
|
| 192 |
+
# lightning
|
| 193 |
+
# lightning-utilities
|
| 194 |
+
# pinecone-plugin-assistant
|
| 195 |
+
# pytorch-lightning
|
| 196 |
+
# torchmetrics
|
| 197 |
passlib==1.7.4
|
| 198 |
# via ideasprinter-api (pyproject.toml)
|
| 199 |
pillow==12.0.0
|
| 200 |
# via fpdf2
|
| 201 |
+
pinecone==8.0.0
|
| 202 |
+
# via ideasprinter-api (pyproject.toml)
|
| 203 |
+
pinecone-plugin-assistant==3.0.1
|
| 204 |
+
# via pinecone
|
| 205 |
+
pinecone-plugin-interface==0.0.7
|
| 206 |
+
# via pinecone
|
| 207 |
propcache==0.4.1
|
| 208 |
# via
|
| 209 |
# aiohttp
|
|
|
|
| 237 |
# via pydantic
|
| 238 |
pyparsing==3.2.5
|
| 239 |
# via httplib2
|
| 240 |
+
python-dateutil==2.9.0.post0
|
| 241 |
+
# via pinecone
|
| 242 |
python-dotenv==1.2.1
|
| 243 |
# via ideasprinter-api (pyproject.toml)
|
| 244 |
python-jose==3.5.0
|
| 245 |
# via ideasprinter-api (pyproject.toml)
|
| 246 |
python-multipart==0.0.20
|
| 247 |
# via ideasprinter-api (pyproject.toml)
|
| 248 |
+
pytorch-lightning==2.6.0
|
| 249 |
+
# via lightning
|
| 250 |
pyyaml==6.0.3
|
| 251 |
+
# via
|
| 252 |
+
# langchain-core
|
| 253 |
+
# libcst
|
| 254 |
+
# lightning
|
| 255 |
+
# omegaconf
|
| 256 |
+
# pytorch-lightning
|
| 257 |
requests==2.32.5
|
| 258 |
# via
|
| 259 |
# google-api-core
|
| 260 |
# langsmith
|
| 261 |
+
# pinecone-plugin-assistant
|
| 262 |
# requests-oauthlib
|
| 263 |
# requests-toolbelt
|
| 264 |
requests-oauthlib==2.0.0
|
|
|
|
| 269 |
# via
|
| 270 |
# google-auth
|
| 271 |
# python-jose
|
| 272 |
+
setuptools==80.9.0
|
| 273 |
+
# via
|
| 274 |
+
# lightning-utilities
|
| 275 |
+
# torch
|
| 276 |
six==1.17.0
|
| 277 |
+
# via
|
| 278 |
+
# ecdsa
|
| 279 |
+
# python-dateutil
|
| 280 |
sqlalchemy==2.0.45
|
| 281 |
# via ideasprinter-api (pyproject.toml)
|
| 282 |
starlette==0.50.0
|
| 283 |
# via fastapi
|
| 284 |
+
sympy==1.14.0
|
| 285 |
+
# via torch
|
| 286 |
tenacity==9.1.2
|
| 287 |
# via langchain-core
|
| 288 |
+
torch==2.9.1
|
| 289 |
+
# via
|
| 290 |
+
# lightning
|
| 291 |
+
# pytorch-lightning
|
| 292 |
+
# torchmetrics
|
| 293 |
+
torchmetrics==1.8.2
|
| 294 |
+
# via
|
| 295 |
+
# lightning
|
| 296 |
+
# pytorch-lightning
|
| 297 |
+
tqdm==4.67.1
|
| 298 |
+
# via
|
| 299 |
+
# lightning
|
| 300 |
+
# pytorch-lightning
|
| 301 |
typing-extensions==4.15.0
|
| 302 |
# via
|
| 303 |
# aiosignal
|
| 304 |
# anyio
|
| 305 |
# fastapi
|
| 306 |
+
# fiddle
|
| 307 |
# langchain-core
|
| 308 |
+
# lightning
|
| 309 |
+
# lightning-utilities
|
| 310 |
+
# pinecone
|
| 311 |
# pydantic
|
| 312 |
# pydantic-core
|
| 313 |
+
# pytorch-lightning
|
| 314 |
# sqlalchemy
|
| 315 |
# starlette
|
| 316 |
+
# torch
|
| 317 |
# typing-inspection
|
| 318 |
typing-inspection==0.4.2
|
| 319 |
# via pydantic
|
| 320 |
+
upstash-redis==1.5.0
|
| 321 |
+
# via ideasprinter-api (pyproject.toml)
|
| 322 |
uritemplate==4.2.0
|
| 323 |
# via google-api-python-client
|
| 324 |
urllib3==2.5.0
|
| 325 |
+
# via
|
| 326 |
+
# pinecone
|
| 327 |
+
# requests
|
| 328 |
uuid-utils==0.12.0
|
| 329 |
# via langsmith
|
| 330 |
uvicorn==0.38.0
|
uv.lock
CHANGED
|
@@ -903,6 +903,7 @@ dependencies = [
|
|
| 903 |
{ name = "python-jose", extra = ["cryptography"] },
|
| 904 |
{ name = "python-multipart" },
|
| 905 |
{ name = "sqlalchemy" },
|
|
|
|
| 906 |
{ name = "uvicorn" },
|
| 907 |
]
|
| 908 |
|
|
@@ -941,6 +942,7 @@ requires-dist = [
|
|
| 941 |
{ name = "python-jose", extras = ["cryptography"], specifier = ">=3.3.0" },
|
| 942 |
{ name = "python-multipart", specifier = ">=0.0.6" },
|
| 943 |
{ name = "sqlalchemy", specifier = ">=2.0.0" },
|
|
|
|
| 944 |
{ name = "uvicorn", specifier = ">=0.30.0" },
|
| 945 |
]
|
| 946 |
provides-extras = ["dev"]
|
|
@@ -2535,6 +2537,18 @@ wheels = [
|
|
| 2535 |
{ url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
|
| 2536 |
]
|
| 2537 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2538 |
[[package]]
|
| 2539 |
name = "uritemplate"
|
| 2540 |
version = "4.2.0"
|
|
|
|
| 903 |
{ name = "python-jose", extra = ["cryptography"] },
|
| 904 |
{ name = "python-multipart" },
|
| 905 |
{ name = "sqlalchemy" },
|
| 906 |
+
{ name = "upstash-redis" },
|
| 907 |
{ name = "uvicorn" },
|
| 908 |
]
|
| 909 |
|
|
|
|
| 942 |
{ name = "python-jose", extras = ["cryptography"], specifier = ">=3.3.0" },
|
| 943 |
{ name = "python-multipart", specifier = ">=0.0.6" },
|
| 944 |
{ name = "sqlalchemy", specifier = ">=2.0.0" },
|
| 945 |
+
{ name = "upstash-redis", specifier = ">=1.5.0" },
|
| 946 |
{ name = "uvicorn", specifier = ">=0.30.0" },
|
| 947 |
]
|
| 948 |
provides-extras = ["dev"]
|
|
|
|
| 2537 |
{ url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
|
| 2538 |
]
|
| 2539 |
|
| 2540 |
+
[[package]]
|
| 2541 |
+
name = "upstash-redis"
|
| 2542 |
+
version = "1.5.0"
|
| 2543 |
+
source = { registry = "https://pypi.org/simple" }
|
| 2544 |
+
dependencies = [
|
| 2545 |
+
{ name = "httpx" },
|
| 2546 |
+
]
|
| 2547 |
+
sdist = { url = "https://files.pythonhosted.org/packages/a8/62/bc53c35fbf4e2b774ab0eb02f3908cfe89b6636e87cdc40b264a4fc1dcce/upstash_redis-1.5.0.tar.gz", hash = "sha256:1917d4d009ca803815092892d92c7da9138b4ada6b353974fb74caf063c6d2a3", size = 39356, upload-time = "2025-10-22T10:15:34.608Z" }
|
| 2548 |
+
wheels = [
|
| 2549 |
+
{ url = "https://files.pythonhosted.org/packages/5a/87/d24541a1d9c29033e74aa05b5d8b4857feff79344ebd8fca410eb4683795/upstash_redis-1.5.0-py3-none-any.whl", hash = "sha256:e08de1f74d3fb48a81b383c00398cc9336c43b65b82e6d9266312143970800b9", size = 41088, upload-time = "2025-10-22T10:15:33.363Z" },
|
| 2550 |
+
]
|
| 2551 |
+
|
| 2552 |
[[package]]
|
| 2553 |
name = "uritemplate"
|
| 2554 |
version = "4.2.0"
|