| """ | |
| Prompt Cache System | |
| Saves generated prompts temporarily for reuse and editing | |
| """ | |
| import json | |
| import os | |
| from datetime import datetime | |
| from typing import Dict, List, Optional, Any | |
| from pathlib import Path | |
| # Storage directory for cached prompts | |
| CACHE_DIR = Path("storage/prompt_cache") | |
| CACHE_DIR.mkdir(parents=True, exist_ok=True) | |
| # In-memory cache for quick access | |
| _prompt_cache: Dict[str, Dict[str, Any]] = {} | |
def save_prompt(
    prompt_id: str,
    payload: Dict[str, Any],
    metadata: Optional[Dict[str, Any]] = None
) -> str:
    """
    Save a generated prompt to cache (in-memory and on disk).

    Args:
        prompt_id: Unique identifier for the prompt
        payload: The segments payload
        metadata: Optional metadata (script, style, etc.)

    Returns:
        The prompt_id
    """
    # Take the timestamp once so created_at == updated_at on first save
    # (the original called datetime.now() twice, producing two values).
    now = datetime.now().isoformat()
    cache_entry = {
        "prompt_id": prompt_id,
        "payload": payload,
        "metadata": metadata or {},
        "created_at": now,
        "updated_at": now
    }
    # In-memory cache for quick access within this process
    _prompt_cache[prompt_id] = cache_entry
    # Persist to disk so the entry survives restarts; explicit UTF-8 and
    # ensure_ascii=False keep non-ASCII prompt text readable in the file.
    cache_file = CACHE_DIR / f"{prompt_id}.json"
    with open(cache_file, 'w', encoding='utf-8') as f:
        json.dump(cache_entry, f, indent=2, ensure_ascii=False)
    print(f"💾 Saved prompt to cache: {prompt_id}")
    return prompt_id
def get_prompt(prompt_id: str) -> Optional[Dict[str, Any]]:
    """
    Retrieve a cached prompt.

    Checks the in-memory cache first, then falls back to the on-disk
    cache (populating the in-memory cache on a hit).

    Args:
        prompt_id: The prompt identifier

    Returns:
        The cached prompt entry or None if not found (or unreadable)
    """
    # Check in-memory cache first
    if prompt_id in _prompt_cache:
        return _prompt_cache[prompt_id]
    # Fall back to the disk cache
    cache_file = CACHE_DIR / f"{prompt_id}.json"
    if cache_file.exists():
        try:
            with open(cache_file, 'r', encoding='utf-8') as f:
                cache_entry = json.load(f)
        except (OSError, json.JSONDecodeError) as e:
            # Treat a corrupt/unreadable file as a miss instead of raising,
            # consistent with list_prompts' tolerance of bad cache files.
            print(f"⚠️ Error loading {cache_file}: {e}")
            return None
        _prompt_cache[prompt_id] = cache_entry
        return cache_entry
    return None
def update_prompt(
    prompt_id: str,
    payload: Optional[Dict[str, Any]] = None,
    metadata: Optional[Dict[str, Any]] = None
) -> Optional[Dict[str, Any]]:
    """
    Update an existing cached prompt.

    Args:
        prompt_id: The prompt identifier
        payload: Updated payload (optional; replaces the old payload)
        metadata: Updated metadata (optional; merged into existing metadata)

    Returns:
        The updated cache entry, or None if the prompt was not found
    """
    cache_entry = get_prompt(prompt_id)
    if not cache_entry:
        return None
    # Replace payload wholesale; merge metadata key-by-key.
    if payload is not None:
        cache_entry["payload"] = payload
    if metadata is not None:
        cache_entry["metadata"].update(metadata)
    cache_entry["updated_at"] = datetime.now().isoformat()
    # Write the updated entry back to both caches (same encoding
    # conventions as save_prompt).
    _prompt_cache[prompt_id] = cache_entry
    cache_file = CACHE_DIR / f"{prompt_id}.json"
    with open(cache_file, 'w', encoding='utf-8') as f:
        json.dump(cache_entry, f, indent=2, ensure_ascii=False)
    print(f"✏️ Updated prompt in cache: {prompt_id}")
    return cache_entry
def list_prompts(limit: int = 50) -> List[Dict[str, Any]]:
    """
    List all cached prompts (most recent first).

    Args:
        limit: Maximum number of prompts to return

    Returns:
        List of cached prompt entries, sorted by updated_at descending
    """
    # Always merge disk entries into the in-memory cache. The original
    # only scanned disk when the memory cache was empty, so after a single
    # get_prompt() call every other persisted prompt was silently hidden.
    for cache_file in CACHE_DIR.glob("*.json"):
        try:
            with open(cache_file, 'r', encoding='utf-8') as f:
                cache_entry = json.load(f)
            # setdefault: entries already in memory take precedence.
            _prompt_cache.setdefault(cache_entry["prompt_id"], cache_entry)
        except Exception as e:
            print(f"⚠️ Error loading {cache_file}: {e}")
    # Sort by updated_at (most recent first); missing timestamps sort last.
    prompts = sorted(
        _prompt_cache.values(),
        key=lambda x: x.get("updated_at", ""),
        reverse=True
    )
    return prompts[:limit]
def delete_prompt(prompt_id: str) -> bool:
    """
    Delete a cached prompt from both the in-memory and on-disk caches.

    Args:
        prompt_id: The prompt identifier

    Returns:
        True if the prompt was removed from either cache, False if not found
    """
    removed = False
    # Remove from in-memory cache. The original returned False when the
    # entry existed only in memory, wrongly reporting a successful delete
    # as "not found".
    if prompt_id in _prompt_cache:
        del _prompt_cache[prompt_id]
        removed = True
    # Remove from disk
    cache_file = CACHE_DIR / f"{prompt_id}.json"
    if cache_file.exists():
        cache_file.unlink()
        removed = True
    if removed:
        print(f"🗑️ Deleted prompt from cache: {prompt_id}")
    return removed
def cleanup_old_prompts(max_age_days: int = 7) -> int:
    """
    Clean up cached prompts older than the given age.

    Age is judged by each entry's stored "created_at" timestamp, not the
    file's mtime. Unreadable or malformed cache files are skipped with a
    warning rather than aborting the sweep.

    Args:
        max_age_days: Maximum age in days

    Returns:
        Number of prompts deleted (new, backward-compatible return value)
    """
    from datetime import timedelta
    cutoff = datetime.now() - timedelta(days=max_age_days)
    deleted = 0
    for cache_file in CACHE_DIR.glob("*.json"):
        try:
            with open(cache_file, 'r', encoding='utf-8') as f:
                cache_entry = json.load(f)
            created_at = datetime.fromisoformat(cache_entry["created_at"])
            if created_at < cutoff:
                cache_file.unlink()
                # Drop the matching in-memory entry if present.
                _prompt_cache.pop(cache_entry["prompt_id"], None)
                deleted += 1
        except Exception as e:
            print(f"⚠️ Error cleaning up {cache_file}: {e}")
    if deleted > 0:
        print(f"🧹 Cleaned up {deleted} old prompts")
    return deleted