# tests/test_language_refactor.py
"""
Verification tests for the language logic refactoring.
Tests:
1. _generate_my_listings_message returns a string
2. Language detection works for "Bonjour" (mocking the LLM response)
3. Caching is indeed removed (no import errors)
Run with: pytest tests/test_language_refactor.py -v
"""
import pytest
import asyncio
from unittest.mock import AsyncMock, patch, MagicMock
class TestCachingRemoved:
    """Caching must be fully disabled: the cache helpers are inert no-ops."""

    def test_cache_functions_are_noops(self):
        """The cache API is still importable, but every call does nothing."""
        from app.ai.agent.message_cache import (
            cache_message,
            clear_message_cache,
            get_cache_stats,
            get_cached_message,
        )

        # A lookup can never produce a hit while caching is disabled.
        lookup = get_cached_message("test context", "en", "friendly", "short")
        assert lookup is None, "get_cached_message should always return None (caching disabled)"

        # Writes and clears must be accepted silently (no exception raised).
        cache_message("test context", "en", "friendly", "short", "test message")
        clear_message_cache()

        # Stats must advertise a disabled or empty cache.
        stats = get_cache_stats()
        is_disabled = stats.get("status") == "DISABLED"
        is_empty = stats.get("active_entries") == 0
        assert is_disabled or is_empty

    def test_generate_localized_response_no_cache_import_issues(self):
        """generate_localized_response imports cleanly with the cache removed."""
        try:
            from app.ai.agent.brain import generate_localized_response
        except ImportError as e:
            pytest.fail(f"Import error: {e}")
        # AssertionError was never caught by the except above, so checking
        # callability outside the try is behaviorally identical.
        assert callable(generate_localized_response)
class TestGenerateMyListingsMessage:
    """_generate_my_listings_message must always yield a non-empty string."""

    @pytest.mark.asyncio
    async def test_returns_string_for_empty_listings(self):
        """An empty listings collection still produces a helpful message."""
        from app.ai.agent.brain import _generate_my_listings_message

        with patch('app.ai.agent.brain.brain_llm') as fake_llm:
            # Stub the LLM so no network/model call happens.
            canned = MagicMock()
            canned.content = "You don't have any listings yet! ๐ Would you like me to help you create your first one?"
            fake_llm.ainvoke = AsyncMock(return_value=canned)

            message = await _generate_my_listings_message(
                listings=[],
                language="en",
                user_name="John",
            )

        assert isinstance(message, str), "_generate_my_listings_message should return a string"
        assert len(message) > 0, "Result should not be empty"

    @pytest.mark.asyncio
    async def test_returns_string_for_listings_with_data(self):
        """A populated listings collection produces a personalized message."""
        from app.ai.agent.brain import _generate_my_listings_message

        with patch('app.ai.agent.brain.brain_llm') as fake_llm:
            # Stub the LLM so no network/model call happens.
            canned = MagicMock()
            canned.content = "Hey John! ๐ Here are your 2 listings (1 for rent, 1 for sale). ๐ก Tip: Long-press any listing to edit or delete it!"
            fake_llm.ainvoke = AsyncMock(return_value=canned)

            sample_listings = [
                {"_id": "123", "title": "Apartment", "listing_type": "rent"},
                {"_id": "456", "title": "House", "listing_type": "sale"},
            ]
            message = await _generate_my_listings_message(
                listings=sample_listings,
                language="en",
                user_name="John Doe",
            )

        assert isinstance(message, str), "_generate_my_listings_message should return a string"
        assert len(message) > 0, "Result should not be empty"
class TestLanguageDetection:
    """classify_intent must set language_detected from the LLM's JSON reply."""

    @staticmethod
    async def _classify_with_mock(user_message, llm_json):
        """Run classify_intent on *user_message* with the LLM stubbed to
        return *llm_json*; returns the resulting state.

        Helper (leading underscore) — not collected by pytest.
        """
        from app.ai.agent.state import AgentState

        with patch('app.ai.agent.nodes.classify_intent.llm') as fake_llm:
            reply = MagicMock()
            reply.content = llm_json
            fake_llm.ainvoke = AsyncMock(return_value=reply)

            # Import inside the patch context, mirroring the mocked module path.
            from app.ai.agent.nodes.classify_intent import classify_intent

            # Minimal state: user_role is a required field.
            state = AgentState(
                user_id="test_user",
                session_id="test_session",
                user_role="renter",
            )
            state.last_user_message = user_message
            return await classify_intent(state)

    @pytest.mark.asyncio
    async def test_french_detection_bonjour(self):
        """'Bonjour' must be detected as French."""
        french_json = '''
{
"type": "greeting",
"confidence": 0.95,
"reasoning": "User greeted in French",
"language": "fr",
"requires_auth": false,
"next_action": "greet"
}
'''
        result_state = await self._classify_with_mock("Bonjour", french_json)
        assert result_state.language_detected == "fr", \
            f"Expected language_detected='fr', got '{result_state.language_detected}'"

    @pytest.mark.asyncio
    async def test_spanish_detection_hola(self):
        """'Hola' must be detected as Spanish."""
        spanish_json = '''
{
"type": "greeting",
"confidence": 0.95,
"reasoning": "User greeted in Spanish",
"language": "es",
"requires_auth": false,
"next_action": "greet"
}
'''
        result_state = await self._classify_with_mock(
            "Hola, busco un apartamento", spanish_json
        )
        assert result_state.language_detected == "es", \
            f"Expected language_detected='es', got '{result_state.language_detected}'"
class TestGenerateLocalizedResponse:
    """generate_localized_response: success path and failure fallback."""

    @pytest.mark.asyncio
    async def test_returns_string_on_success(self):
        """A successful LLM call yields a non-empty string."""
        from app.ai.agent.brain import generate_localized_response

        with patch('app.ai.agent.brain.brain_llm') as fake_llm:
            # Stub out the model with a fixed French reply.
            reply = MagicMock()
            reply.content = "Bienvenue! Comment puis-je vous aider?"
            fake_llm.ainvoke = AsyncMock(return_value=reply)

            text = await generate_localized_response(
                context="Greet user warmly",
                language="fr",
                tone="friendly",
                max_length="short",
            )

        assert isinstance(text, str)
        assert len(text) > 0

    @pytest.mark.asyncio
    async def test_fallback_on_failure(self):
        """When the LLM raises, the generic English fallback is returned."""
        from app.ai.agent.brain import generate_localized_response

        with patch('app.ai.agent.brain.brain_llm') as fake_llm:
            # Force every invocation to blow up.
            fake_llm.ainvoke = AsyncMock(side_effect=Exception("LLM Error"))

            text = await generate_localized_response(
                context="Greet user warmly",
                language="fr",
                tone="friendly",
                max_length="short",
                max_retries=1,  # keep the test fast
            )

        # The hard-coded fallback message must come back verbatim.
        assert text == "Service temporarily unavailable. Please try again."
if __name__ == "__main__":
    # Convenience entry point: lets the file be executed directly
    # (python tests/test_language_refactor.py) instead of via the pytest CLI.
    pytest.main([__file__, "-v"])