# Source: AIDA repo — app/ai/agent/message_cache.py
# (uploaded by destinyebuka, commit b7833be, "fyp")
# app/ai/agent/message_cache.py
"""
DEPRECATED: Message caching has been disabled.
As of 2026-02-16, all LLM responses are generated fresh to ensure:
1. Dynamic, context-aware messages
2. No stale cached responses
3. Accurate language-specific generation
This file is kept as a placeholder to prevent import errors.
All cache functions are no-ops that do nothing.
"""
from typing import Optional, Dict
def get_cached_message(
    context: str,
    language: str,
    tone: str,
    max_length: str
) -> Optional[str]:
    """DEPRECATED: cache lookup disabled — always reports a miss.

    Every argument is ignored; callers always receive None so that a
    fresh LLM response is generated instead of a cached one.
    """
    # Deliberate no-op: falling through returns None (cache miss).
    return None
def cache_message(
    context: str,
    language: str,
    tone: str,
    max_length: str,
    message: str
) -> None:
    """DEPRECATED: cache write disabled — accepts and discards its input.

    Kept only so existing call sites keep working; nothing is stored.
    """
    # Intentionally empty: no cache exists to write into.
    return None
def clear_message_cache() -> None:
    """DEPRECATED: no-op — there is no cache left to clear."""
    # Intentionally empty placeholder.
    return None
def get_cache_stats() -> Dict:
    """DEPRECATED: report empty statistics for the disabled cache.

    Returns a dict with zeroed entry counts and a "DISABLED" status so
    monitoring code that still polls cache stats keeps functioning.
    """
    # All counters are hard-coded to zero; the cache no longer exists.
    stats = dict(
        total_entries=0,
        expired_entries=0,
        active_entries=0,
        status="DISABLED",
    )
    return stats