File size: 1,242 Bytes
5830944
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
import time
from typing import Dict, Any, List
from core.faiss_vector import FaissIndex

# Module-level cache state.
_TTL = 300  # seconds — entries older than this are evicted on read
# Maps cache key -> (insert timestamp from _now(), cached result list).
_cache: Dict[str, Any] = {}
# Lazily-created FaissIndex singleton; constructed on first _get_index() call.
_faiss = None

def _now():
    return int(time.time())

def _get_index():
    """Return the shared FaissIndex, constructing it on first use."""
    global _faiss
    if _faiss is not None:
        return _faiss
    _faiss = FaissIndex()
    return _faiss

def cache_get(key):
    """Return the cached payload for *key*, or None if missing or stale.

    Expired entries are evicted as a side effect of the lookup.
    """
    entry = _cache.get(key)
    if not entry:
        return None
    stored_at, payload = entry
    age = _now() - stored_at
    if age > _TTL:
        # Lazy eviction: drop the stale entry on read.
        _cache.pop(key, None)
        return None
    return payload

def cache_set(key, data: List[dict]):
    """Store *data* under *key* with the current timestamp.

    Non-empty "snippet" fields are also pushed into the local FAISS index
    for semantic recall; indexing is best-effort and never raises.
    """
    _cache[key] = (_now(), data)
    snippets = []
    for item in data:
        snippet = item.get("snippet", "")
        if snippet:
            snippets.append(snippet)
    if not snippets:
        return
    try:
        _get_index().add(snippets)
    except Exception:
        # Indexing failures must not break caching — deliberate best-effort.
        pass

def dedupe(results: List[dict]) -> List[dict]:
    """Return *results* with duplicates removed, preserving first-seen order.

    Two results are considered duplicates when their ("source", "url",
    "snippet") values are all equal; absent keys compare as None.
    """
    seen = set()
    out = []
    for r in results:
        key = (r.get("source"), r.get("url"), r.get("snippet"))
        # Store the tuple itself rather than hash(tuple): equal hashes do
        # not imply equal values, so hash-based dedupe could silently drop
        # distinct results on a collision.
        if key not in seen:
            seen.add(key)
            out.append(r)
    return out

def source_counts(results: List[dict]) -> Dict[str, int]:
    """Tally how many results each source contributed.

    Results lacking a "source" key are counted under "Unknown".
    """
    tally: Dict[str, int] = {}
    for item in results:
        name = item.get("source", "Unknown")
        if name in tally:
            tally[name] += 1
        else:
            tally[name] = 1
    return tally