# tests/test_language_refactor.py
"""
Verification tests for the language logic refactoring.

Tests:
1. _generate_my_listings_message returns a string
2. Language detection works for "Bonjour" (mocking the LLM response)
3. Caching is indeed removed (no import errors)

Run with: pytest tests/test_language_refactor.py -v
"""
import asyncio
from unittest.mock import AsyncMock, MagicMock, patch

import pytest
class TestCachingRemoved:
    """Verify that message caching has been fully disabled."""

    def test_cache_functions_are_noops(self):
        """All cache entry points still exist but behave as no-ops."""
        from app.ai.agent.message_cache import (
            get_cached_message,
            cache_message,
            clear_message_cache,
            get_cache_stats,
        )

        # A lookup must never produce a hit while caching is disabled.
        lookup = get_cached_message("test context", "en", "friendly", "short")
        assert lookup is None, "get_cached_message should always return None (caching disabled)"

        # Writing an entry and clearing the cache must both be harmless no-ops.
        cache_message("test context", "en", "friendly", "short", "test message")
        clear_message_cache()

        # Stats should advertise the disabled state (or report an empty cache).
        stats = get_cache_stats()
        assert stats.get("status") == "DISABLED" or stats.get("active_entries") == 0

    def test_generate_localized_response_no_cache_import_issues(self):
        """generate_localized_response must import cleanly with caching removed."""
        try:
            from app.ai.agent.brain import generate_localized_response
        except ImportError as e:
            pytest.fail(f"Import error: {e}")
        else:
            assert callable(generate_localized_response)
class TestGenerateMyListingsMessage:
    """Test the _generate_my_listings_message function.

    The async tests are explicitly marked with ``pytest.mark.asyncio`` so they
    run under pytest-asyncio's strict mode (the default); without the marker
    the coroutines would be collected but never awaited.
    """

    @pytest.mark.asyncio
    async def test_returns_string_for_empty_listings(self):
        """Test that empty listings returns a helpful message."""
        from app.ai.agent.brain import _generate_my_listings_message

        # Mock the LLM call so the test is deterministic and makes no network call.
        with patch('app.ai.agent.brain.brain_llm') as mock_llm:
            mock_response = MagicMock()
            mock_response.content = "You don't have any listings yet! 🏠 Would you like me to help you create your first one?"
            mock_llm.ainvoke = AsyncMock(return_value=mock_response)

            result = await _generate_my_listings_message(
                listings=[],
                language="en",
                user_name="John"
            )

            assert isinstance(result, str), "_generate_my_listings_message should return a string"
            assert len(result) > 0, "Result should not be empty"

    @pytest.mark.asyncio
    async def test_returns_string_for_listings_with_data(self):
        """Test that listings with data returns a personalized message."""
        from app.ai.agent.brain import _generate_my_listings_message

        # Mock the LLM call so the test is deterministic and makes no network call.
        with patch('app.ai.agent.brain.brain_llm') as mock_llm:
            mock_response = MagicMock()
            mock_response.content = "Hey John! 🏠 Here are your 2 listings (1 for rent, 1 for sale). 💡 Tip: Long-press any listing to edit or delete it!"
            mock_llm.ainvoke = AsyncMock(return_value=mock_response)

            test_listings = [
                {"_id": "123", "title": "Apartment", "listing_type": "rent"},
                {"_id": "456", "title": "House", "listing_type": "sale"},
            ]

            result = await _generate_my_listings_message(
                listings=test_listings,
                language="en",
                user_name="John Doe"
            )

            assert isinstance(result, str), "_generate_my_listings_message should return a string"
            assert len(result) > 0, "Result should not be empty"
class TestLanguageDetection:
    """Test language detection in classify_intent.

    The async tests are explicitly marked with ``pytest.mark.asyncio`` so they
    run under pytest-asyncio's strict mode (the default); without the marker
    the coroutines would be collected but never awaited.
    """

    @pytest.mark.asyncio
    async def test_french_detection_bonjour(self):
        """Test that 'Bonjour' is detected as French."""
        from app.ai.agent.state import AgentState

        # Mock the LLM response for classification so no real model is called.
        with patch('app.ai.agent.nodes.classify_intent.llm') as mock_llm:
            mock_response = MagicMock()
            mock_response.content = '''
            {
                "type": "greeting",
                "confidence": 0.95,
                "reasoning": "User greeted in French",
                "language": "fr",
                "requires_auth": false,
                "next_action": "greet"
            }
            '''
            mock_llm.ainvoke = AsyncMock(return_value=mock_response)

            from app.ai.agent.nodes.classify_intent import classify_intent

            # Create minimal state with required fields.
            state = AgentState(
                user_id="test_user",
                session_id="test_session",
                user_role="renter"  # Required field
            )
            state.last_user_message = "Bonjour"

            result_state = await classify_intent(state)

            # Verify language was detected from the mocked classification.
            assert result_state.language_detected == "fr", \
                f"Expected language_detected='fr', got '{result_state.language_detected}'"

    @pytest.mark.asyncio
    async def test_spanish_detection_hola(self):
        """Test that 'Hola' is detected as Spanish."""
        from app.ai.agent.state import AgentState

        # Mock the LLM response for classification so no real model is called.
        with patch('app.ai.agent.nodes.classify_intent.llm') as mock_llm:
            mock_response = MagicMock()
            mock_response.content = '''
            {
                "type": "greeting",
                "confidence": 0.95,
                "reasoning": "User greeted in Spanish",
                "language": "es",
                "requires_auth": false,
                "next_action": "greet"
            }
            '''
            mock_llm.ainvoke = AsyncMock(return_value=mock_response)

            from app.ai.agent.nodes.classify_intent import classify_intent

            # Create minimal state with required fields.
            state = AgentState(
                user_id="test_user",
                session_id="test_session",
                user_role="renter"  # Required field
            )
            state.last_user_message = "Hola, busco un apartamento"

            result_state = await classify_intent(state)

            # Verify language was detected from the mocked classification.
            assert result_state.language_detected == "es", \
                f"Expected language_detected='es', got '{result_state.language_detected}'"
class TestGenerateLocalizedResponse:
    """Test the generate_localized_response function.

    The async tests are explicitly marked with ``pytest.mark.asyncio`` so they
    run under pytest-asyncio's strict mode (the default); without the marker
    the coroutines would be collected but never awaited.
    """

    @pytest.mark.asyncio
    async def test_returns_string_on_success(self):
        """Test that a successful LLM call returns a string."""
        from app.ai.agent.brain import generate_localized_response

        # Mock the LLM call so the test is deterministic and makes no network call.
        with patch('app.ai.agent.brain.brain_llm') as mock_llm:
            mock_response = MagicMock()
            mock_response.content = "Bienvenue! Comment puis-je vous aider?"
            mock_llm.ainvoke = AsyncMock(return_value=mock_response)

            result = await generate_localized_response(
                context="Greet user warmly",
                language="fr",
                tone="friendly",
                max_length="short"
            )

            assert isinstance(result, str)
            assert len(result) > 0

    @pytest.mark.asyncio
    async def test_fallback_on_failure(self):
        """Test that LLM failure returns generic English fallback."""
        from app.ai.agent.brain import generate_localized_response

        # Mock the LLM call to always fail so the fallback path is exercised.
        with patch('app.ai.agent.brain.brain_llm') as mock_llm:
            mock_llm.ainvoke = AsyncMock(side_effect=Exception("LLM Error"))

            result = await generate_localized_response(
                context="Greet user warmly",
                language="fr",
                tone="friendly",
                max_length="short",
                max_retries=1  # Reduce retries for faster test
            )

            # Should return the fallback message
            assert result == "Service temporarily unavailable. Please try again."
# Allow running this file directly (python tests/test_language_refactor.py)
# as a convenience alongside the usual `pytest` invocation.
if __name__ == "__main__":
    pytest.main([__file__, "-v"])