"""
Tests for model cache system.
"""
import pytest
import time
from backend.core.model_cache import ModelCache, get_model_cache
def test_cache_singleton():
    """ModelCache must behave as a process-wide singleton."""
    first = ModelCache()
    second = ModelCache()
    third = get_model_cache()
    # Every construction path yields the very same object.
    assert first is second
    assert second is third
    # A write through one handle must be visible through the others.
    first.clear()  # start from a known-empty state
    first.set('test', 'value', 1.0)
    assert second.get('test') == 'value'
    assert third.get('test') == 'value'
    first.clear()  # leave no state behind for other tests
def test_cache_basic_operations():
    """Exercise the fundamental get/set/stats round trip."""
    cache = ModelCache()
    cache.clear()
    cache.reset_stats()
    # An absent key is a miss and yields None.
    assert cache.get('model1') is None
    # Store an entry, then read it back.
    cache.set('model1', {'data': 'test'}, 100.0)
    assert cache.get('model1') == {'data': 'test'}
    # Statistics reflect the single entry plus the miss/hit above.
    snapshot = cache.stats()
    assert snapshot['total_models'] == 1
    assert snapshot['memory_mb'] == 100.0
    assert snapshot['cache_hits'] >= 1
    assert snapshot['cache_misses'] >= 1
    cache.clear()  # tidy up shared singleton state
def test_cache_lru_eviction():
    """Test LRU eviction when the cache reaches its entry limit.

    Shrinks ``max_models`` to force evictions. Because ``ModelCache`` is a
    singleton, the original limit is saved and restored in ``finally`` so
    a failing assertion cannot leak the tiny limit into other tests
    (the original test never restored it).
    """
    cache = ModelCache()
    cache.clear()
    original_max_models = cache.max_models
    cache.max_models = 3  # Small limit for testing
    try:
        # Fill the cache exactly to its limit.
        cache.set('model1', 'data1', 100.0)
        cache.set('model2', 'data2', 100.0)
        cache.set('model3', 'data3', 100.0)
        assert cache.stats()['total_models'] == 3
        # Touch model1 so model2 becomes the least recently used entry.
        cache.get('model1')
        # Adding a 4th model should evict model2 (least recently used).
        cache.set('model4', 'data4', 100.0)
        assert cache.stats()['total_models'] == 3
        assert cache.get('model1') is not None  # Still there (recently used)
        assert cache.get('model2') is None      # Evicted
        assert cache.get('model3') is not None  # Still there
        assert cache.get('model4') is not None  # Newly added
    finally:
        # Restore singleton configuration and contents.
        cache.max_models = original_max_models
        cache.clear()
def test_cache_memory_limit():
    """Test memory limit enforcement.

    Saves and restores both ``max_memory_mb`` and ``max_models`` in a
    ``finally`` block so the shared singleton is left exactly as found
    (the original restored only the memory limit, and only when every
    assertion passed).
    """
    cache = ModelCache()
    cache.clear()
    original_limit = cache.max_memory_mb
    original_max_models = cache.max_models
    cache.max_memory_mb = 500  # 500MB limit
    cache.max_models = 10
    try:
        # Add models totaling 300MB — comfortably under the limit.
        cache.set('model1', 'data1', 100.0)
        cache.set('model2', 'data2', 100.0)
        cache.set('model3', 'data3', 100.0)
        assert cache.stats()['memory_mb'] == 300.0
        # Adding 300MB more would exceed the 500MB limit, so the cache
        # must evict entries to stay under it.
        cache.set('model4', 'data4', 300.0)
        stats = cache.stats()
        assert stats['memory_mb'] <= 500.0
        assert stats['evictions'] >= 1
    finally:
        # Restore singleton configuration and contents.
        cache.max_memory_mb = original_limit
        cache.max_models = original_max_models
        cache.clear()
def test_cache_clear():
    """clear() must drop every entry and zero the memory accounting."""
    cache = ModelCache()
    cache.clear()  # known-empty starting point
    cache.set('model1', 'data1', 100.0)
    cache.set('model2', 'data2', 100.0)
    assert cache.stats()['total_models'] == 2
    cache.clear()
    after = cache.stats()
    assert after['total_models'] == 0
    assert after['memory_mb'] == 0.0
    # Previously cached entries are gone.
    for key in ('model1', 'model2'):
        assert cache.get(key) is None
def test_cache_statistics():
    """Hit/miss counters, hit rate, and per-model details are tracked."""
    cache = ModelCache()
    cache.clear()
    cache.reset_stats()
    # Generate two misses and two hits, in this order.
    cache.get('missing')              # miss
    cache.set('model1', 'data', 100.0)
    cache.get('model1')               # hit
    cache.get('model1')               # hit
    cache.get('missing2')             # miss
    snapshot = cache.stats()
    assert snapshot['cache_hits'] >= 2
    assert snapshot['cache_misses'] >= 2
    assert snapshot['hit_rate'] > 0
    # A per-model breakdown is exposed under the 'models' key.
    assert 'models' in snapshot
    assert 'model1' in snapshot['models']
    cache.clear()  # tidy up shared singleton state
def test_cache_disabled():
    """With caching disabled, set() is a no-op and get() always misses."""
    cache = ModelCache()
    cache.clear()
    saved_flag = cache.enable_cache
    cache.enable_cache = False
    # Writes are dropped while the cache is off.
    cache.set('model1', 'data1', 100.0)
    assert cache.get('model1') is None
    # Turning the cache back on restores normal behavior.
    cache.enable_cache = True
    cache.set('model2', 'data2', 100.0)
    assert cache.get('model2') == 'data2'
    # Put the singleton back the way we found it.
    cache.enable_cache = saved_flag
    cache.clear()