import os
import sys
from pathlib import Path

# Make the project root importable so the `core` package resolves when this
# script is run directly (rather than installed as a package).
project_root = Path(__file__).parent
sys.path.append(str(project_root))

# These imports must come AFTER the sys.path tweak above.
from core.redis_client import redis_client
from core.session import session_manager
from core.llm import send_to_ollama
|
def test_full_system():
    """Run an end-to-end smoke test of the AI Life Coach stack.

    Exercises three subsystems in order -- Redis connectivity, session
    management, and the Ollama LLM endpoint -- printing a status line
    for each step.

    Returns:
        bool: True if every check passed, False on the first failure.
    """
    print("=== AI Life Coach Full System Test ===")
    print()

    # --- Test 1: Redis connectivity and basic key operations ---
    print("Test 1: Redis Connection")
    try:
        client = redis_client.get_client()
        if client is None:
            print("❌ Redis client is None")
            return False

        result = client.ping()
        print(f"✅ Redis ping successful: {result}")

        # Round-trip a throwaway key to verify read/write access.
        client.set('system_test_key', 'system_test_value')
        value = client.get('system_test_key')
        client.delete('system_test_key')

        # NOTE(review): this comparison assumes the client was created with
        # decode_responses=True; otherwise get() returns bytes -- confirm.
        if value == 'system_test_value':
            print("✅ Redis set/get operations working")
        else:
            print("❌ Redis set/get operations failed")
            return False  # bug fix: previously fell through and "passed"
    except Exception as e:
        print(f"❌ Redis test failed: {e}")
        return False

    print()

    # --- Test 2: Session lifecycle (create, update, clear) ---
    print("Test 2: Session Management")
    try:
        session = session_manager.get_session("test_user")
        print("✅ Session creation successful")

        result = session_manager.update_session("test_user", {"test": "data"})
        if result:
            print("✅ Session update successful")
        else:
            print("❌ Session update failed")
            return False  # bug fix: previously ignored the failure

        session_manager.clear_session("test_user")
        print("✅ Session cleanup successful")
    except Exception as e:
        print(f"❌ Session management test failed: {e}")
        return False

    print()

    # --- Test 3: Ollama LLM round trip ---
    print("Test 3: Ollama Integration")
    try:
        conversation_history = [
            {"role": "user", "content": "Hello! Please introduce yourself briefly."}
        ]

        # Allow the endpoint/model to be overridden via environment; the
        # previous hard-coded ngrok URL expires whenever the tunnel restarts.
        ollama_url = os.getenv("OLLAMA_URL", "https://7bcc180dffd1.ngrok-free.app")
        model_name = os.getenv("OLLAMA_MODEL", "mistral:latest")

        response = send_to_ollama(
            "Hello! Please introduce yourself briefly.",
            conversation_history,
            ollama_url,
            model_name
        )

        if response:
            print("✅ Ollama integration successful")
            print(f"Response: {response[:100]}{'...' if len(response) > 100 else ''}")
        else:
            print("❌ Ollama integration failed - no response")
            return False  # bug fix: previously still reported overall success
    except Exception as e:
        print(f"❌ Ollama integration test failed: {e}")
        return False

    print()
    print("🎉 All system tests passed!")
    print("Your AI Life Coach is ready to use!")
    return True
| |
|
if __name__ == "__main__":
    # Exit non-zero on failure so CI / shell callers can detect a broken system.
    success = test_full_system()
    if success:
        print("\n✅ System is fully operational!")
    else:
        print("\n❌ System has issues that need attention!")
        sys.exit(1)