Spaces:
Sleeping
Sleeping
File size: 7,690 Bytes
3856f78 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 |
#!/usr/bin/env python3
"""
Test script to demonstrate the cache system functionality
"""
import time
import logging
from app.services.linkedin_search import LinkedInSearchService
from app.services.cache_service import CacheService
# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
def test_cache_functionality():
    """Exercise the cache layer end-to-end against a live search service.

    Runs seven sequential checks — initial stats, search-result caching,
    query caching, profile caching, updated stats, expired-entry cleanup,
    and selective clearing — logging elapsed times so cache hits can be
    compared against the initial misses. Performs real network/search
    calls; intended as a manual integration script, not a unit test.
    """
    logger.info("🧪 Starting cache system test...")

    # Initialize services; the search service owns the cache instance we probe.
    linkedin_service = LinkedInSearchService()
    cache_service = linkedin_service.cache_service

    # Test 1: Check initial cache stats
    logger.info("\n📊 Test 1: Initial cache statistics")
    stats = cache_service.get_cache_stats()
    logger.info(f"Cache enabled: {stats['cache_enabled']}")
    logger.info(f"Cache TTL: {stats['cache_ttl']} seconds")
    logger.info(f"Cache max size: {stats['cache_max_size']}")
    logger.info(f"Search cache size: {stats['search_cache_size']}")
    logger.info(f"Profile cache size: {stats['profile_cache_size']}")
    logger.info(f"Query cache size: {stats['query_cache_size']}")

    # Test 2: Search result caching — identical arguments should hit the
    # cache on the second call and return noticeably faster.
    logger.info("\n🔍 Test 2: Search result caching")
    job_description = "Senior Python Developer with React experience"
    location = "San Francisco, CA"

    # First search (should miss cache)
    logger.info("Performing first search (should miss cache)...")
    # perf_counter() is monotonic and the right clock for measuring
    # elapsed intervals (time.time() can jump with system clock changes).
    start_time = time.perf_counter()
    results1 = linkedin_service.search_linkedin_profiles(
        job_description=job_description,
        location=location,
        max_results=5
    )
    first_search_time = time.perf_counter() - start_time
    logger.info(f"First search completed in {first_search_time:.2f} seconds")
    logger.info(f"Found {len(results1)} candidates")

    # Second search (should hit cache)
    logger.info("Performing second search (should hit cache)...")
    start_time = time.perf_counter()
    results2 = linkedin_service.search_linkedin_profiles(
        job_description=job_description,
        location=location,
        max_results=5
    )
    second_search_time = time.perf_counter() - start_time
    logger.info(f"Second search completed in {second_search_time:.2f} seconds")
    logger.info(f"Found {len(results2)} candidates")

    # Compare results: a correct cache returns the same result set.
    if len(results1) == len(results2):
        logger.info("✅ Cache test successful - same number of results returned")
    else:
        logger.warning("⚠️ Cache test failed - different number of results")

    # Check performance improvement (guard against division by zero when
    # the first search was instantaneous).
    if first_search_time > 0:
        improvement = ((first_search_time - second_search_time) / first_search_time) * 100
        logger.info(f"Performance improvement: {improvement:.1f}%")

    # Test 3: Raw query caching at the Google-search layer.
    logger.info("\n🔍 Test 3: Query result caching")
    test_query = "site:linkedin.com/in/ Python Developer San Francisco"

    # First query (should miss cache)
    logger.info("Performing first query (should miss cache)...")
    start_time = time.perf_counter()
    query_results1 = linkedin_service._perform_google_search(test_query, 5)
    first_query_time = time.perf_counter() - start_time
    logger.info(f"First query completed in {first_query_time:.2f} seconds")
    logger.info(f"Found {len(query_results1)} query results")

    # Second query (should hit cache)
    logger.info("Performing second query (should hit cache)...")
    start_time = time.perf_counter()
    query_results2 = linkedin_service._perform_google_search(test_query, 5)
    second_query_time = time.perf_counter() - start_time
    logger.info(f"Second query completed in {second_query_time:.2f} seconds")
    logger.info(f"Found {len(query_results2)} query results")

    # Test 4: Profile-level caching, using a URL from the earlier search
    # (skipped silently when no candidate or no URL is available).
    logger.info("\n👤 Test 4: Profile data caching")
    if results1:
        test_profile_url = results1[0].get('profile_url', '')
        if test_profile_url:
            logger.info(f"Testing profile caching for: {test_profile_url}")

            # First profile scrape (should miss cache)
            logger.info("Performing first profile scrape (should miss cache)...")
            start_time = time.perf_counter()
            profile_data1 = linkedin_service._scrape_linkedin_profile(test_profile_url)
            first_profile_time = time.perf_counter() - start_time
            logger.info(f"First profile scrape completed in {first_profile_time:.2f} seconds")

            # Second profile scrape (should hit cache)
            logger.info("Performing second profile scrape (should hit cache)...")
            start_time = time.perf_counter()
            profile_data2 = linkedin_service._scrape_linkedin_profile(test_profile_url)
            second_profile_time = time.perf_counter() - start_time
            logger.info(f"Second profile scrape completed in {second_profile_time:.2f} seconds")

            if profile_data1.get('success') == profile_data2.get('success'):
                logger.info("✅ Profile cache test successful")
            else:
                logger.warning("⚠️ Profile cache test failed")

    # Test 5: Stats should now reflect the entries added above.
    logger.info("\n📊 Test 5: Updated cache statistics")
    updated_stats = cache_service.get_cache_stats()
    logger.info(f"Search cache size: {updated_stats['search_cache_size']}")
    logger.info(f"Profile cache size: {updated_stats['profile_cache_size']}")
    logger.info(f"Query cache size: {updated_stats['query_cache_size']}")

    # Test 6: Cache cleanup of expired entries.
    logger.info("\n🧹 Test 6: Cache cleanup")
    cache_service.cleanup_expired_entries()
    logger.info("Cache cleanup completed")

    # Test 7: Selective clearing — only the query cache is dropped.
    logger.info("\n🗑️ Test 7: Cache clearing")
    cache_service.clear_cache("query")  # Clear only query cache
    logger.info("Query cache cleared")

    final_stats = cache_service.get_cache_stats()
    logger.info(f"Query cache size after clearing: {final_stats['query_cache_size']}")

    logger.info("\n🎉 Cache system test completed successfully!")
def test_cache_persistence():
    """Verify cached entries survive a service restart via persistent storage.

    Writes known search and profile entries through one CacheService
    instance, forces a save, then constructs a second instance (simulating
    a process restart) and checks both entries can be read back. Clears
    all caches afterwards so the test data does not leak into real runs.
    """
    logger.info("\n💾 Testing cache persistence...")

    # Initialize cache service
    cache_service = CacheService()

    # Known fixtures we expect to round-trip through the persistent store.
    test_data = {
        "test_search": [{"name": "Test Candidate", "profile_url": "https://linkedin.com/in/test"}],
        "test_profile": {"name": "Test Profile", "success": True}
    }

    # Set test data
    cache_service.set_search_results("test job", "test location", 5, test_data["test_search"])
    cache_service.set_profile_data("https://linkedin.com/in/test", test_data["test_profile"])
    logger.info("Test data added to cache")

    # Force save to persistent storage (private API — ensures the write
    # happens now rather than on whatever schedule the service uses).
    cache_service._save_persistent_cache()
    logger.info("Cache saved to persistent storage")

    # Create new cache service instance (simulates restart)
    new_cache_service = CacheService()

    # Try to retrieve the data through the fresh instance.
    retrieved_search = new_cache_service.get_search_results("test job", "test location", 5)
    retrieved_profile = new_cache_service.get_profile_data("https://linkedin.com/in/test")

    if retrieved_search and retrieved_profile:
        logger.info("✅ Cache persistence test successful")
    else:
        logger.warning("⚠️ Cache persistence test failed")

    # Clean up test data
    new_cache_service.clear_cache("all")
if __name__ == "__main__":
    try:
        test_cache_functionality()
        test_cache_persistence()
    except Exception as e:
        # logger.exception logs the full traceback, which logger.error with
        # an f-string would silently drop; re-raise so the process exits
        # non-zero on failure.
        logger.exception(f"❌ Test failed: {str(e)}")
        raise