File size: 5,417 Bytes
82a1419
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
"""
Prompt Cache System
Saves generated prompts temporarily for reuse and editing
"""

import json
import os
from datetime import datetime
from typing import Dict, List, Optional, Any
from pathlib import Path

# Storage directory for cached prompts
CACHE_DIR = Path("storage/prompt_cache")
CACHE_DIR.mkdir(parents=True, exist_ok=True)

# In-memory cache for quick access
_prompt_cache: Dict[str, Dict[str, Any]] = {}


def save_prompt(
    prompt_id: str,
    payload: Dict[str, Any],
    metadata: Optional[Dict[str, Any]] = None
) -> str:
    """
    Save a generated prompt to the in-memory and on-disk cache.

    Args:
        prompt_id: Unique identifier for the prompt. It is also used as the
            on-disk filename stem, so it should be filesystem-safe.
        payload: The segments payload
        metadata: Optional metadata (script, style, etc.)

    Returns:
        The prompt_id
    """
    # Capture a single timestamp so created_at and updated_at are identical
    # for a freshly saved entry (two datetime.now() calls can differ).
    now = datetime.now().isoformat()
    cache_entry = {
        "prompt_id": prompt_id,
        "payload": payload,
        "metadata": metadata or {},
        "created_at": now,
        "updated_at": now
    }

    # In-memory cache for fast lookups within this process
    _prompt_cache[prompt_id] = cache_entry

    # Persist to disk so the cache survives restarts
    cache_file = CACHE_DIR / f"{prompt_id}.json"
    with open(cache_file, 'w', encoding='utf-8') as f:
        json.dump(cache_entry, f, indent=2)

    print(f"💾 Saved prompt to cache: {prompt_id}")
    return prompt_id


def get_prompt(prompt_id: str) -> Optional[Dict[str, Any]]:
    """
    Retrieve a cached prompt.

    Checks the in-memory cache first, then falls back to the on-disk cache
    (populating the in-memory cache on a disk hit).

    Args:
        prompt_id: The prompt identifier

    Returns:
        The cached prompt entry, or None if not found or unreadable
    """
    # Fast path: in-memory cache
    if prompt_id in _prompt_cache:
        return _prompt_cache[prompt_id]

    # Slow path: disk cache
    cache_file = CACHE_DIR / f"{prompt_id}.json"
    if cache_file.exists():
        try:
            with open(cache_file, 'r', encoding='utf-8') as f:
                cache_entry = json.load(f)
        except (OSError, json.JSONDecodeError) as e:
            # Treat a corrupt or unreadable cache file as a miss instead of
            # crashing the caller (consistent with list_prompts' handling).
            print(f"⚠️  Error loading {cache_file}: {e}")
            return None
        _prompt_cache[prompt_id] = cache_entry
        return cache_entry

    return None


def update_prompt(
    prompt_id: str,
    payload: Optional[Dict[str, Any]] = None,
    metadata: Optional[Dict[str, Any]] = None
) -> Optional[Dict[str, Any]]:
    """
    Update an existing cached prompt in memory and on disk.

    Args:
        prompt_id: The prompt identifier
        payload: Updated payload; replaces the stored payload entirely (optional)
        metadata: Updated metadata; merged into the stored metadata (optional)

    Returns:
        The updated cache entry, or None if the prompt was not found
    """
    cache_entry = get_prompt(prompt_id)
    if not cache_entry:
        return None

    # payload replaces wholesale; metadata is merged key-by-key
    if payload is not None:
        cache_entry["payload"] = payload
    if metadata is not None:
        cache_entry["metadata"].update(metadata)

    cache_entry["updated_at"] = datetime.now().isoformat()

    # Write-through: keep memory and disk in sync
    _prompt_cache[prompt_id] = cache_entry
    cache_file = CACHE_DIR / f"{prompt_id}.json"
    with open(cache_file, 'w', encoding='utf-8') as f:
        json.dump(cache_entry, f, indent=2)

    print(f"✏️  Updated prompt in cache: {prompt_id}")
    return cache_entry


def list_prompts(limit: int = 50) -> List[Dict[str, Any]]:
    """
    List all cached prompts (most recently updated first).

    Args:
        limit: Maximum number of prompts to return

    Returns:
        List of cached prompt entries
    """
    # Merge any on-disk entries not already in memory.  The previous version
    # only read from disk when the in-memory cache was completely empty, so
    # once any single prompt was cached in memory, prompts persisted by
    # earlier runs were silently omitted from listings.  In-memory entries
    # win, since updates are written through memory first.
    for cache_file in CACHE_DIR.glob("*.json"):
        try:
            with open(cache_file, 'r', encoding='utf-8') as f:
                cache_entry = json.load(f)
            pid = cache_entry["prompt_id"]
            if pid not in _prompt_cache:
                _prompt_cache[pid] = cache_entry
        except Exception as e:
            print(f"⚠️  Error loading {cache_file}: {e}")

    # ISO-8601 timestamps sort chronologically as strings
    prompts = sorted(
        _prompt_cache.values(),
        key=lambda entry: entry.get("updated_at", ""),
        reverse=True
    )

    return prompts[:limit]


def delete_prompt(prompt_id: str) -> bool:
    """
    Delete a cached prompt from memory and disk.

    Args:
        prompt_id: The prompt identifier

    Returns:
        True if the prompt existed (in memory or on disk) and was deleted,
        False if it was not found anywhere
    """
    # Track whether we removed anything: the previous version returned False
    # for an entry that existed only in memory, even though it deleted it.
    found = prompt_id in _prompt_cache
    if found:
        del _prompt_cache[prompt_id]

    # Remove from disk
    cache_file = CACHE_DIR / f"{prompt_id}.json"
    if cache_file.exists():
        cache_file.unlink()
        found = True

    if found:
        print(f"🗑️  Deleted prompt from cache: {prompt_id}")
    return found


def cleanup_old_prompts(max_age_days: int = 7) -> int:
    """
    Delete cached prompts older than the specified number of days.

    Age is measured from each entry's "created_at" timestamp. Entries whose
    file cannot be read or parsed are skipped (and logged), not deleted.

    Args:
        max_age_days: Maximum age in days before a prompt is purged

    Returns:
        Number of prompts deleted
    """
    from datetime import timedelta

    cutoff = datetime.now() - timedelta(days=max_age_days)
    deleted = 0

    for cache_file in CACHE_DIR.glob("*.json"):
        try:
            with open(cache_file, 'r', encoding='utf-8') as f:
                cache_entry = json.load(f)
            # Missing/unparseable created_at falls through to the broad
            # except below, leaving the entry in place.
            created_at = datetime.fromisoformat(cache_entry["created_at"])

            if created_at < cutoff:
                cache_file.unlink()
                # Keep the in-memory cache consistent with disk
                _prompt_cache.pop(cache_entry["prompt_id"], None)
                deleted += 1
        except Exception as e:
            print(f"⚠️  Error cleaning up {cache_file}: {e}")

    if deleted > 0:
        print(f"🧹 Cleaned up {deleted} old prompts")
    return deleted