# app/ai/agent/message_cache.py
"""
DEPRECATED: Message caching has been disabled.

As of 2026-02-16, all LLM responses are generated fresh to ensure:
1. Dynamic, context-aware messages
2. No stale cached responses
3. Accurate language-specific generation

This file is kept as a placeholder to prevent import errors.
All cache functions are no-ops.
"""

from typing import Optional, Dict


def get_cached_message(
    context: str,
    language: str,
    tone: str,
    max_length: str
) -> Optional[str]:
    """DEPRECATED: Always returns None (cache disabled)."""
    return None


def cache_message(
    context: str,
    language: str,
    tone: str,
    max_length: str,
    message: str
) -> None:
    """DEPRECATED: No-op (cache disabled)."""
    pass


def clear_message_cache() -> None:
    """DEPRECATED: No-op (cache disabled)."""
    pass


def get_cache_stats() -> Dict:
    """DEPRECATED: Returns empty stats (cache disabled)."""
    return {
        "total_entries": 0,
        "expired_entries": 0,
        "active_entries": 0,
        "status": "DISABLED"
    }
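

# ---------------------------------------------------------------------------
# Illustrative sketch (an assumption, not part of the original call sites):
# how a caller keeps working against these no-ops. The real generation
# function is project-specific, so it is passed in as a callable here.
# ---------------------------------------------------------------------------

from typing import Callable  # used only by the example below


def _example_compose_message(
    context: str,
    language: str,
    tone: str,
    max_length: str,
    generate: Callable[[str, str, str, str], str],
) -> str:
    """Hypothetical caller: always falls through to fresh generation."""
    cached = get_cached_message(context, language, tone, max_length)
    if cached is not None:  # never true while the cache is disabled
        return cached
    message = generate(context, language, tone, max_length)
    cache_message(context, language, tone, max_length, message)  # no-op
    return message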