File size: 1,306 Bytes
3ca965c
 
 
 
 
 
 
 
 
 
 
f507044
3ca965c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b36bc5f
 
 
 
 
 
3ca965c
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
"""
cache.py
Simple pickle cache for backtest results.
Keyed by MD5 of: last_date + start_yr + fee_bps + split + lookback
"""

import hashlib
import pickle
from pathlib import Path

# Cache location and version tag. CACHE_VERSION is baked into every key, so
# bumping it invalidates all previously cached results at once.
CACHE_DIR     = Path("/tmp/p2_arima_cache")
CACHE_VERSION = "v6"  # bumped: three-factor composite (50% mom + 25% RS/SPY + 25% MA slope)
# parents=True: also create missing ancestor directories instead of raising
# FileNotFoundError on platforms/sandboxes where the parent does not exist.
CACHE_DIR.mkdir(parents=True, exist_ok=True)


def make_cache_key(last_date, start_yr, fee_bps, split, lookback):
    """Return the MD5 hex digest identifying one backtest configuration.

    The digest is version-prefixed (CACHE_VERSION) so a version bump
    invalidates every previously cached entry.
    """
    parts = (CACHE_VERSION, last_date, start_yr, fee_bps, split, lookback)
    fingerprint = "_".join(str(p) for p in parts)
    return hashlib.md5(fingerprint.encode()).hexdigest()


def make_lb_cache_key(last_date, start_yr, split):
    """Return the MD5 hex digest for a lookback-sweep result.

    The "lb" tag keeps these keys disjoint from make_cache_key's keyspace.
    """
    fingerprint = "_".join(
        (CACHE_VERSION, "lb", str(last_date), str(start_yr), str(split))
    )
    return hashlib.md5(fingerprint.encode()).hexdigest()


def save_cache(key, payload):
    """Best-effort: pickle *payload* to CACHE_DIR/<key>.pkl.

    Any failure (disk full, permissions, unpicklable payload) is swallowed —
    caching is an optimization and must never abort the backtest.
    """
    target = CACHE_DIR / f"{key}.pkl"
    try:
        with open(target, "wb") as fh:
            pickle.dump(payload, fh)
    except Exception:
        pass  # opportunistic write; failures are non-fatal by design


def clear_all_cache():
    """Delete every cached *.pkl entry — call when strategy logic changes."""
    stale = list(CACHE_DIR.glob("*.pkl"))
    for entry in stale:
        entry.unlink(missing_ok=True)


def load_cache(key):
    """Return the payload cached under *key*, or None on miss or corruption.

    EAFP: opens the file directly instead of exists()-then-open, which closes
    the race where the entry is deleted between the check and the read.
    A corrupt or unreadable entry is removed so the next run regenerates it.
    """
    path = CACHE_DIR / f"{key}.pkl"
    try:
        with open(path, "rb") as f:
            return pickle.load(f)
    except FileNotFoundError:
        return None  # plain cache miss
    except Exception:
        # Broad by design: unpickling a stale/corrupt file can raise many
        # types (EOFError, UnpicklingError, AttributeError, ...). Drop the
        # bad entry and fall through to a miss.
        path.unlink(missing_ok=True)
        return None