| """Layer 3 – Step 9: Detect natural-experiment scenarios from raw macro data. |
| |
| Reads raw CSVs from ``data/macro/`` (NOT the processed panel) and identifies |
| historically significant macro events. Scenarios are granularity-independent |
| calendar-date events. |
| |
| Output: ``data/benchmark/{granularity}/scenarios.parquet`` |
| |
| Uses all available FRED series + EIA commodity data to detect 49 event types |
| covering rates, equity, commodities, FX, inflation, labor, credit, housing, |
| money supply, financial conditions, and cross-asset composite signals. |
| Short-term (5-day) and medium-term (21-day) windows are used for daily series. |
| """ |
|
|
| from __future__ import annotations |
|
|
| import logging |
| from pathlib import Path |
|
|
| import numpy as np |
| import pandas as pd |
|
|
| from . import config |
|
|
| logger = logging.getLogger(__name__) |
|
|
|
|
| |
| |
| |
|
|
def _load_fred(series_id: str) -> pd.DataFrame:
    """Load a single FRED CSV, returning (date, value) DataFrame."""
    empty = pd.DataFrame(columns=["date", "value"])
    path = config.MACRO_DIR / f"fred_{series_id}.csv"
    if not path.exists():
        return empty
    raw = pd.read_csv(path)
    if "date" not in raw.columns:
        return empty
    raw["date"] = pd.to_datetime(raw["date"])
    # Pick the value column: prefer one named after the series id,
    # otherwise the first non-date column.
    value_candidates = [c for c in raw.columns if c != "date"]
    if not value_candidates:
        return empty
    col = series_id if series_id in raw.columns else value_candidates[0]
    out = raw[["date", col]].rename(columns={col: "value"})
    out["value"] = pd.to_numeric(out["value"], errors="coerce")
    return out.dropna(subset=["value"]).sort_values("date").reset_index(drop=True)
|
|
|
|
def _load_commodity_spot(subdir: str, candidates: list[str]) -> pd.DataFrame:
    """Load a commodity spot CSV from a macro subdirectory."""
    empty = pd.DataFrame(columns=["date", "value"])
    base = config.MACRO_DIR / subdir
    if not base.is_dir():
        return empty
    for name in candidates:
        csv_path = base / name
        if not csv_path.exists():
            continue
        raw = pd.read_csv(csv_path)
        # First column whose name mentions "time" or "date" is the date axis.
        date_col = next(
            (c for c in raw.columns if "time" in c.lower() or "date" in c.lower()),
            None,
        )
        if date_col is None:
            continue
        # Prefer an explicit "spot" column; otherwise fall back to the
        # first numeric column.
        val_col = next(
            (c for c in raw.columns if c != date_col and "spot" in c.lower()),
            None,
        )
        if val_col is None:
            numeric = raw.select_dtypes(include="number").columns.tolist()
            if numeric:
                val_col = numeric[0]
        if val_col is None:
            continue
        raw[date_col] = pd.to_datetime(raw[date_col], errors="coerce")
        out = raw[[date_col, val_col]].rename(
            columns={date_col: "date", val_col: "value"})
        out["value"] = pd.to_numeric(out["value"], errors="coerce")
        return out.dropna(subset=["value"]).sort_values("date").reset_index(drop=True)
    return empty
|
|
|
|
def _load_crude_spot() -> pd.DataFrame:
    """Load the daily crude-oil spot series."""
    return _load_commodity_spot("crude_oil", ["crude_spot_daily.csv"])
|
|
|
|
def _load_natgas_spot() -> pd.DataFrame:
    """Load the natural-gas spot series, preferring weekly over daily."""
    candidates = [
        "natural_gas_spot_weekly.csv",
        "natural_gas_spot_daily.csv",
    ]
    return _load_commodity_spot("natural_gas", candidates)
|
|
|
|
| |
| |
| |
|
|
def _detect_fed_rate_changes(df: pd.DataFrame) -> list[dict]:
    """Detect FEDFUNDS changes >= SCENARIO_FEDFUNDS_DELTA between consecutive observations."""
    if df.empty:
        return []
    threshold = config.SCENARIO_FEDFUNDS_DELTA
    work = df.copy()
    # diff() against the previous observation is equivalent to tracking
    # the prior value by hand.
    work["move"] = work["value"].diff()
    out: list[dict] = []
    for row in work.iloc[1:].itertuples():
        if abs(row.move) < threshold:
            continue
        verb = "raised" if row.move > 0 else "lowered"
        out.append({
            "event_type": "fed_rate_change",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, the Fed {verb} rates by "
                f"{abs(row.move)*100:.0f}bps to {row.value:.2f}%."
            ),
        })
    return out
|
|
|
|
def _detect_vix_spikes(df: pd.DataFrame) -> list[dict]:
    """Detect VIX > ratio * rolling mean."""
    window = config.SCENARIO_VIX_ROLLING_WINDOW
    if len(df) < window:
        return []
    ratio = config.SCENARIO_VIX_SPIKE_RATIO
    work = df.copy()
    work["rolling_mean"] = work["value"].rolling(window, min_periods=window).mean()
    work = work.dropna(subset=["rolling_mean"])
    is_spike = work["value"] > ratio * work["rolling_mean"]
    if not is_spike.any():
        return []
    # Collapse runs of consecutive spike rows (by index label) into single
    # episodes, reporting only the first day of each run.
    run_starts: list = []
    last_idx = None
    for idx in is_spike[is_spike].index:
        if last_idx is None or idx != last_idx + 1:
            run_starts.append(idx)
        last_idx = idx
    events: list[dict] = []
    for idx in run_starts:
        row = work.loc[idx]
        events.append({
            "event_type": "vix_spike",
            "event_date": row["date"],
            "event_description": (
                f"On {row['date'].date()}, VIX spiked to {row['value']:.1f} "
                f"({row['value']/row['rolling_mean']:.1f}x its {window}-day average of "
                f"{row['rolling_mean']:.1f})."
            ),
        })
    return events
|
|
|
|
def _detect_oil_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect crude-oil moves >= threshold over rolling window."""
    window = config.SCENARIO_OIL_ROLLING_WINDOW
    if len(df) < window:
        return []
    threshold = config.SCENARIO_OIL_PCT_CHANGE
    work = df.copy()
    work["pct_change"] = work["value"].pct_change(periods=window)
    flagged = work[work["pct_change"].abs() >= threshold]
    events: list[dict] = []
    last_date = None
    for row in flagged.itertuples():
        # De-duplicate: skip hits within `window` calendar days of the last one.
        if last_date is not None and (row.date - last_date).days < window:
            continue
        verb = "surged" if row.pct_change > 0 else "plunged"
        events.append({
            "event_type": "oil_shock",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, crude oil {verb} "
                f"{abs(row.pct_change)*100:.1f}% over the prior {window} trading days "
                f"to ${row.value:.2f}/bbl."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
def _detect_market_drawdowns(df: pd.DataFrame) -> list[dict]:
    """Detect S&P 500 drops >= threshold over rolling window."""
    window = config.SCENARIO_SP500_ROLLING_WINDOW
    if len(df) < window:
        return []
    threshold = config.SCENARIO_SP500_DRAWDOWN
    work = df.copy()
    work["pct_change"] = work["value"].pct_change(periods=window)
    # Only declines count here — drawdowns, not rallies.
    flagged = work[work["pct_change"] <= -threshold]
    events: list[dict] = []
    last_date = None
    for row in flagged.itertuples():
        if last_date is not None and (row.date - last_date).days < window:
            continue
        events.append({
            "event_type": "market_drawdown",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, the S&P 500 dropped "
                f"{abs(row.pct_change)*100:.1f}% over the prior {window} trading days "
                f"to {row.value:.0f}."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
def _detect_natgas_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect natural-gas spot moves >= threshold over rolling window."""
    window = config.SCENARIO_NATGAS_ROLLING_WINDOW
    if len(df) < window:
        return []
    threshold = config.SCENARIO_NATGAS_PCT_CHANGE
    work = df.copy()
    work["pct_change"] = work["value"].pct_change(periods=window)
    flagged = work[work["pct_change"].abs() >= threshold]
    if flagged.empty:
        return []
    events: list[dict] = []
    last_date = None
    for row in flagged.itertuples():
        # window * 7: the series may be weekly, so de-dup on calendar days.
        if last_date is not None and (row.date - last_date).days < window * 7:
            continue
        verb = "surged" if row.pct_change > 0 else "plunged"
        events.append({
            "event_type": "natgas_shock",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, natural gas {verb} "
                f"{abs(row.pct_change)*100:.1f}% over the prior {window} periods "
                f"to ${row.value:.2f}/MMBtu."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
def _detect_nasdaq_moves(df: pd.DataFrame) -> list[dict]:
    """Detect NASDAQ large moves (crashes or rallies) over rolling window."""
    window = config.SCENARIO_NASDAQ_ROLLING_WINDOW
    if len(df) < window:
        return []
    threshold = config.SCENARIO_NASDAQ_PCT_CHANGE
    work = df.copy()
    work["pct_change"] = work["value"].pct_change(periods=window)
    flagged = work[work["pct_change"].abs() >= threshold]
    if flagged.empty:
        return []
    events: list[dict] = []
    last_date = None
    for row in flagged.itertuples():
        if last_date is not None and (row.date - last_date).days < window:
            continue
        verb = "rallied" if row.pct_change > 0 else "dropped"
        events.append({
            "event_type": "nasdaq_move",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, the NASDAQ Composite {verb} "
                f"{abs(row.pct_change)*100:.1f}% over the prior {window} trading days "
                f"to {row.value:.0f}."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
def _detect_yield_curve_events(dgs10: pd.DataFrame, dgs2: pd.DataFrame) -> list[dict]:
    """Detect yield curve inversions and steep re-steepening events."""
    if dgs10.empty or dgs2.empty:
        return []
    curve = pd.merge(dgs10, dgs2, on="date", suffixes=("_10y", "_2y"))
    if curve.empty:
        return []
    curve = curve.sort_values("date").reset_index(drop=True)
    curve["spread"] = curve["value_10y"] - curve["value_2y"]

    window = config.SCENARIO_YIELD_CURVE_WINDOW
    inv_level = config.SCENARIO_YIELD_CURVE_INVERSION
    events: list[dict] = []

    # 1) Downward crossings through the inversion level.
    curve["prev_spread"] = curve["spread"].shift(1)
    crossed_down = curve[
        (curve["spread"] < inv_level) & (curve["prev_spread"] >= inv_level)
    ]
    last_date = None
    for row in crossed_down.itertuples():
        if last_date is not None and (row.date - last_date).days < window:
            continue
        events.append({
            "event_type": "yield_curve_event",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, the yield curve inverted: "
                f"10Y-2Y spread fell to {row.spread*100:.0f}bps "
                f"(10Y={row.value_10y:.2f}%, 2Y={row.value_2y:.2f}%)."
            ),
        })
        last_date = row.date

    # 2) Upward crossings back through the inversion level.
    crossed_up = curve[
        (curve["spread"] >= inv_level) & (curve["prev_spread"] < inv_level)
    ]
    last_date = None
    for row in crossed_up.itertuples():
        if last_date is not None and (row.date - last_date).days < window:
            continue
        events.append({
            "event_type": "yield_curve_event",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, the yield curve un-inverted: "
                f"10Y-2Y spread recovered to {row.spread*100:.0f}bps "
                f"(10Y={row.value_10y:.2f}%, 2Y={row.value_2y:.2f}%)."
            ),
        })
        last_date = row.date

    # 3) Large spread moves (steepening/flattening) over the window.
    if len(curve) > window:
        curve["spread_change"] = curve["spread"] - curve["spread"].shift(window)
        steep_level = config.SCENARIO_YIELD_CURVE_STEEPENING
        big_moves = curve[curve["spread_change"].abs() >= steep_level].dropna(subset=["spread_change"])
        last_date = None
        for row in big_moves.itertuples():
            if last_date is not None and (row.date - last_date).days < window:
                continue
            verb = "steepened" if row.spread_change > 0 else "flattened"
            events.append({
                "event_type": "yield_curve_event",
                "event_date": row.date,
                "event_description": (
                    f"On {row.date.date()}, the yield curve {verb} by "
                    f"{abs(row.spread_change)*100:.0f}bps over {window} days: "
                    f"10Y-2Y spread at {row.spread*100:.0f}bps."
                ),
            })
            last_date = row.date

    return events
|
|
|
|
def _detect_treasury_rate_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect large moves in the 10-year Treasury yield."""
    window = config.SCENARIO_DGS10_ROLLING_WINDOW
    if len(df) < window:
        return []
    threshold = config.SCENARIO_DGS10_DELTA
    work = df.copy()
    work["abs_change"] = work["value"] - work["value"].shift(window)
    flagged = work[work["abs_change"].abs() >= threshold].dropna(subset=["abs_change"])
    if flagged.empty:
        return []
    events: list[dict] = []
    last_date = None
    for row in flagged.itertuples():
        if last_date is not None and (row.date - last_date).days < window:
            continue
        verb = "surged" if row.abs_change > 0 else "plunged"
        events.append({
            "event_type": "treasury_rate_shock",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, the 10-year Treasury yield {verb} "
                f"{abs(row.abs_change)*100:.0f}bps over {window} trading days "
                f"to {row.value:.2f}%."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
def _detect_usd_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect large moves in the trade-weighted USD index."""
    window = config.SCENARIO_USD_ROLLING_WINDOW
    if len(df) < window:
        return []
    threshold = config.SCENARIO_USD_PCT_CHANGE
    work = df.copy()
    work["pct_change"] = work["value"].pct_change(periods=window)
    flagged = work[work["pct_change"].abs() >= threshold]
    if flagged.empty:
        return []
    events: list[dict] = []
    last_date = None
    for row in flagged.itertuples():
        if last_date is not None and (row.date - last_date).days < window:
            continue
        verb = "strengthened" if row.pct_change > 0 else "weakened"
        events.append({
            "event_type": "usd_shock",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, the trade-weighted USD {verb} "
                f"{abs(row.pct_change)*100:.1f}% over {window} trading days "
                f"to {row.value:.1f}."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
| |
| |
| |
|
|
| def _detect_mom_change(df: pd.DataFrame, event_type: str, label: str, |
| threshold: float, unit: str = "", fmt: str = ".1f", |
| de_dup_days: int = 28) -> list[dict]: |
| """Generic month-over-month percentage change detector.""" |
| if len(df) < 2: |
| return [] |
| df = df.copy() |
| df["pct_change"] = df["value"].pct_change() |
| large = df[df["pct_change"].abs() >= threshold].dropna(subset=["pct_change"]) |
| events = [] |
| prev_date = None |
| for _, row in large.iterrows(): |
| if prev_date is not None and (row["date"] - prev_date).days < de_dup_days: |
| continue |
| direction = "jumped" if row["pct_change"] > 0 else "dropped" |
| events.append({ |
| "event_type": event_type, |
| "event_date": row["date"], |
| "event_description": ( |
| f"On {row['date'].date()}, {label} {direction} " |
| f"{abs(row['pct_change'])*100:{fmt}}% month-over-month " |
| f"to {row['value']:{fmt}}{unit}." |
| ), |
| }) |
| prev_date = row["date"] |
| return events |
|
|
|
|
| def _detect_level_change(df: pd.DataFrame, event_type: str, label: str, |
| delta: float, window: int, unit: str = "%", |
| de_dup_days: int | None = None) -> list[dict]: |
| """Generic absolute level change detector over a rolling window.""" |
| if len(df) < window: |
| return [] |
| de_dup = de_dup_days or window |
| df = df.copy() |
| df["abs_change"] = df["value"] - df["value"].shift(window) |
| large = df[df["abs_change"].abs() >= delta].dropna(subset=["abs_change"]) |
| events = [] |
| prev_date = None |
| for _, row in large.iterrows(): |
| if prev_date is not None and (row["date"] - prev_date).days < de_dup: |
| continue |
| direction = "surged" if row["abs_change"] > 0 else "plunged" |
| events.append({ |
| "event_type": event_type, |
| "event_date": row["date"], |
| "event_description": ( |
| f"On {row['date'].date()}, {label} {direction} " |
| f"{abs(row['abs_change'])*100:.0f}bps over {window} periods " |
| f"to {row['value']:.2f}{unit}." |
| ), |
| }) |
| prev_date = row["date"] |
| return events |
|
|
|
|
| def _detect_spike_ratio(df: pd.DataFrame, event_type: str, label: str, |
| ratio: float, window: int, unit: str = "", |
| de_dup_days: int | None = None) -> list[dict]: |
| """Generic spike detector: value > ratio * rolling mean.""" |
| if len(df) < window: |
| return [] |
| de_dup = de_dup_days or window * 7 |
| df = df.copy() |
| df["rolling_mean"] = df["value"].rolling(window, min_periods=window).mean() |
| df = df.dropna(subset=["rolling_mean"]) |
| spike_mask = df["value"] > ratio * df["rolling_mean"] |
| if not spike_mask.any(): |
| return [] |
| events = [] |
| spike_df = df[spike_mask] |
| prev_date = None |
| for _, row in spike_df.iterrows(): |
| if prev_date is not None and (row["date"] - prev_date).days < de_dup: |
| continue |
| events.append({ |
| "event_type": event_type, |
| "event_date": row["date"], |
| "event_description": ( |
| f"On {row['date'].date()}, {label} spiked to {row['value']:.0f}{unit} " |
| f"({row['value']/row['rolling_mean']:.1f}x its {window}-period average " |
| f"of {row['rolling_mean']:.0f}{unit})." |
| ), |
| }) |
| prev_date = row["date"] |
| return events |
|
|
|
|
| |
| |
| |
|
|
def _detect_cpi_shocks(df: pd.DataFrame) -> list[dict]:
    """Flag months where CPI moved sharply month-over-month."""
    return _detect_mom_change(
        df,
        event_type="inflation_shock",
        label="CPI",
        threshold=config.SCENARIO_CPI_MOM_THRESHOLD,
        fmt=".2f",
    )
|
|
|
|
def _detect_ppi_shocks(df: pd.DataFrame) -> list[dict]:
    """Flag months where PPI moved sharply month-over-month."""
    return _detect_mom_change(
        df,
        event_type="ppi_shock",
        label="PPI",
        threshold=config.SCENARIO_PPI_MOM_THRESHOLD,
        fmt=".1f",
    )
|
|
|
|
def _detect_unemployment_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect unemployment rate jumps."""
    if len(df) < 2:
        return []
    work = df.copy()
    work["change"] = work["value"].diff()
    flagged = work[work["change"].abs() >= config.SCENARIO_UNRATE_DELTA].dropna(subset=["change"])
    events: list[dict] = []
    last_date = None
    for row in flagged.itertuples():
        # De-duplicate within roughly one monthly release cycle.
        if last_date is not None and (row.date - last_date).days < 28:
            continue
        verb = "rose" if row.change > 0 else "fell"
        events.append({
            "event_type": "unemployment_shock",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, the unemployment rate {verb} "
                f"{abs(row.change):.1f}pp to {row.value:.1f}%."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
def _detect_jobless_claims_spikes(df: pd.DataFrame) -> list[dict]:
    """Flag spikes in initial jobless claims versus their rolling mean."""
    return _detect_spike_ratio(
        df,
        event_type="jobless_claims_spike",
        label="initial jobless claims",
        ratio=config.SCENARIO_ICSA_SPIKE_RATIO,
        window=config.SCENARIO_ICSA_ROLLING_WINDOW,
        unit="K",
        de_dup_days=28,
    )
|
|
|
|
def _detect_payroll_shocks(df: pd.DataFrame) -> list[dict]:
    """Flag large month-over-month swings in nonfarm payrolls."""
    return _detect_mom_change(
        df,
        event_type="payroll_shock",
        label="nonfarm payrolls",
        threshold=config.SCENARIO_PAYROLLS_DELTA,
        fmt=".1f",
    )
|
|
|
|
def _detect_hy_spread_events(df: pd.DataFrame) -> list[dict]:
    """Flag high-yield credit spread blow-outs over the configured window."""
    return _detect_level_change(
        df,
        event_type="hy_spread_event",
        label="the high-yield credit spread",
        delta=config.SCENARIO_HY_SPREAD_DELTA,
        window=config.SCENARIO_HY_SPREAD_WINDOW,
    )
|
|
|
|
def _detect_ig_spread_events(df: pd.DataFrame) -> list[dict]:
    """Flag investment-grade corporate spread moves over the configured window."""
    return _detect_level_change(
        df,
        event_type="ig_spread_event",
        label="the IG corporate spread",
        delta=config.SCENARIO_IG_SPREAD_DELTA,
        window=config.SCENARIO_IG_SPREAD_WINDOW,
    )
|
|
|
|
def _detect_ted_spread_spikes(df: pd.DataFrame) -> list[dict]:
    """Detect TED spread crossing above threshold."""
    if df.empty:
        return []
    threshold = config.SCENARIO_TED_SPIKE
    work = df.copy()
    work["prev"] = work["value"].shift(1)
    # Upward crossings only: today at/above the threshold, yesterday below.
    crossed = work[(work["value"] >= threshold) &
                   (work["prev"] < threshold)].dropna(subset=["prev"])
    events: list[dict] = []
    last_date = None
    for row in crossed.itertuples():
        if last_date is not None and (row.date - last_date).days < 30:
            continue
        events.append({
            "event_type": "ted_spread_spike",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, the TED spread spiked to "
                f"{row.value*100:.0f}bps, signaling interbank stress."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
def _detect_financial_stress(df: pd.DataFrame) -> list[dict]:
    """Detect financial stress index exceeding threshold."""
    if df.empty:
        return []
    threshold = config.SCENARIO_FSI_THRESHOLD
    work = df.copy()
    work["prev"] = work["value"].shift(1)
    # Upward crossings only: today at/above the threshold, yesterday below.
    crossed = work[(work["value"] >= threshold) &
                   (work["prev"] < threshold)].dropna(subset=["prev"])
    events: list[dict] = []
    last_date = None
    for row in crossed.itertuples():
        if last_date is not None and (row.date - last_date).days < 60:
            continue
        events.append({
            "event_type": "financial_stress",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, the St. Louis Fed Financial Stress Index "
                f"rose to {row.value:.2f}, indicating elevated systemic stress."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
def _detect_mortgage_rate_shocks(df: pd.DataFrame) -> list[dict]:
    """Flag large moves in 30-year mortgage rates over the configured window."""
    return _detect_level_change(
        df,
        event_type="mortgage_rate_shock",
        label="the 30-year mortgage rate",
        delta=config.SCENARIO_MORTGAGE_DELTA,
        window=config.SCENARIO_MORTGAGE_ROLLING_WINDOW,
    )
|
|
|
|
def _detect_sentiment_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect large drops in consumer sentiment."""
    window = config.SCENARIO_SENTIMENT_ROLLING_WINDOW
    if len(df) < window + 1:
        return []
    work = df.copy()
    work["pct_change"] = work["value"].pct_change(periods=window)
    flagged = work[work["pct_change"].abs() >= config.SCENARIO_SENTIMENT_PCT_CHANGE].dropna(subset=["pct_change"])
    events: list[dict] = []
    last_date = None
    for row in flagged.itertuples():
        # De-duplicate within roughly one monthly release cycle.
        if last_date is not None and (row.date - last_date).days < 28:
            continue
        verb = "surged" if row.pct_change > 0 else "plunged"
        events.append({
            "event_type": "sentiment_shock",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, U. of Michigan Consumer Sentiment {verb} "
                f"{abs(row.pct_change)*100:.1f}% to {row.value:.1f}."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
def _detect_industrial_production_shocks(df: pd.DataFrame) -> list[dict]:
    """Flag large month-over-month changes in industrial production."""
    return _detect_mom_change(
        df,
        event_type="industrial_production_shock",
        label="industrial production",
        threshold=config.SCENARIO_INDPRO_PCT_CHANGE,
        fmt=".1f",
    )
|
|
|
|
def _detect_retail_sales_shocks(df: pd.DataFrame) -> list[dict]:
    """Flag large month-over-month changes in retail sales."""
    return _detect_mom_change(
        df,
        event_type="retail_sales_shock",
        label="retail sales",
        threshold=config.SCENARIO_RETAIL_PCT_CHANGE,
        unit="B",
        fmt=".0f",
    )
|
|
|
|
def _detect_housing_starts_shocks(df: pd.DataFrame) -> list[dict]:
    """Flag large month-over-month changes in housing starts."""
    return _detect_mom_change(
        df,
        event_type="housing_starts_shock",
        label="housing starts",
        threshold=config.SCENARIO_HOUSING_PCT_CHANGE,
        fmt=".0f",
        de_dup_days=28,
    )
|
|
|
|
def _detect_home_price_events(df: pd.DataFrame) -> list[dict]:
    """Detect Case-Shiller home price acceleration/deceleration."""
    # Need at least 13 monthly observations for a 12-month YoY rate.
    if len(df) < 13:
        return []
    work = df.copy()
    work["yoy"] = work["value"].pct_change(periods=12)
    # Change in the YoY growth rate over the last three observations.
    work["yoy_change"] = work["yoy"] - work["yoy"].shift(3)
    flagged = work[work["yoy_change"].abs() >= config.SCENARIO_HOME_PRICE_YOY_DELTA].dropna(subset=["yoy_change"])
    events: list[dict] = []
    last_date = None
    for row in flagged.itertuples():
        if last_date is not None and (row.date - last_date).days < 60:
            continue
        verb = "accelerated" if row.yoy_change > 0 else "decelerated"
        events.append({
            "event_type": "home_price_event",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, U.S. home price growth {verb}: "
                f"YoY rate shifted {row.yoy_change*100:+.1f}pp to "
                f"{row.yoy*100:.1f}% (Case-Shiller index at {row.value:.1f})."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
def _detect_m2_events(df: pd.DataFrame) -> list[dict]:
    """Detect M2 money supply contraction or surge."""
    # Need at least 13 monthly observations for a 12-month YoY rate.
    if len(df) < 13:
        return []
    work = df.copy()
    work["yoy"] = work["value"].pct_change(periods=12)
    events: list[dict] = []

    # Contraction episodes: YoY growth at or below the configured floor.
    contracting = work[work["yoy"] <= config.SCENARIO_M2_YOY_THRESHOLD].dropna(subset=["yoy"])
    last_date = None
    for row in contracting.itertuples():
        if last_date is not None and (row.date - last_date).days < 60:
            continue
        events.append({
            "event_type": "m2_contraction",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, M2 money supply contracted "
                f"{abs(row.yoy)*100:.1f}% year-over-year to "
                f"${row.value/1e6:.2f}T, a rare monetary tightening signal."
            ),
        })
        last_date = row.date

    # Surge episodes: YoY growth at or above 10% (hard-coded threshold).
    surging = work[work["yoy"] >= 0.10].dropna(subset=["yoy"])
    last_date = None
    for row in surging.itertuples():
        if last_date is not None and (row.date - last_date).days < 60:
            continue
        events.append({
            "event_type": "m2_surge",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, M2 money supply surged "
                f"{row.yoy*100:.1f}% year-over-year to "
                f"${row.value/1e6:.2f}T, signaling aggressive monetary expansion."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
def _detect_dgs30_shocks(df: pd.DataFrame) -> list[dict]:
    """Flag large moves in the 30-year Treasury yield over the configured window."""
    return _detect_level_change(
        df,
        event_type="long_bond_shock",
        label="the 30-year Treasury yield",
        delta=config.SCENARIO_DGS30_DELTA,
        window=config.SCENARIO_DGS30_ROLLING_WINDOW,
    )
|
|
|
|
def _detect_sp_nasdaq_divergence(sp: pd.DataFrame, nq: pd.DataFrame) -> list[dict]:
    """Detect S&P 500 vs NASDAQ divergence (sector rotation signals)."""
    if sp.empty or nq.empty:
        return []
    both = pd.merge(sp, nq, on="date", suffixes=("_sp", "_nq")).sort_values("date")
    w = config.SCENARIO_SP_NASDAQ_WINDOW
    if len(both) < w:
        return []
    both["sp_ret"] = both["value_sp"].pct_change(periods=w)
    both["nq_ret"] = both["value_nq"].pct_change(periods=w)
    both["divergence"] = both["nq_ret"] - both["sp_ret"]
    flagged = both[both["divergence"].abs() >= config.SCENARIO_SP_NASDAQ_DIVERGENCE].dropna(subset=["divergence"])
    events: list[dict] = []
    last_date = None
    for row in flagged.itertuples():
        if last_date is not None and (row.date - last_date).days < w:
            continue
        if row.divergence > 0:
            gap = f"NASDAQ outperformed S&P 500 by {row.divergence*100:.1f}pp"
        else:
            gap = f"NASDAQ underperformed S&P 500 by {abs(row.divergence)*100:.1f}pp"
        events.append({
            "event_type": "sector_rotation",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, {gap} over {w} trading days "
                f"(NASDAQ {row.nq_ret*100:+.1f}% vs S&P {row.sp_ret*100:+.1f}%), "
                f"signaling sector rotation."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
def _detect_vix_regime_change(df: pd.DataFrame) -> list[dict]:
    """Detect sustained elevated VIX (regime change)."""
    if df.empty:
        return []
    threshold = config.SCENARIO_VIX_REGIME_THRESHOLD
    min_days = config.SCENARIO_VIX_REGIME_MIN_DAYS
    events: list[dict] = []
    # Date the current elevated stretch began; None while below threshold.
    regime_start = None
    for row in df.itertuples():
        elevated = row.value >= threshold
        if elevated and regime_start is None:
            regime_start = row.date
        elif not elevated and regime_start is not None:
            span = (row.date - regime_start).days
            if span >= min_days:
                events.append({
                    "event_type": "volatility_regime",
                    "event_date": regime_start,
                    "event_description": (
                        f"Starting {regime_start.date()}, VIX remained above "
                        f"{threshold:.0f} for {span} consecutive days, "
                        f"indicating a sustained high-volatility regime."
                    ),
                })
            regime_start = None
    # A regime still open at the end of the series counts as ongoing.
    if regime_start is not None:
        span = (df["date"].iloc[-1] - regime_start).days
        if span >= min_days:
            events.append({
                "event_type": "volatility_regime",
                "event_date": regime_start,
                "event_description": (
                    f"Starting {regime_start.date()}, VIX remained above "
                    f"{threshold:.0f} for {span}+ days (ongoing), "
                    f"indicating a sustained high-volatility regime."
                ),
            })
    return events
|
|
|
|
| def _detect_yield_curve_3m10y(df: pd.DataFrame) -> list[dict]: |
| """Detect 10Y-3M yield curve inversions (classic recession signal).""" |
| if df.empty: |
| return [] |
| df = df.copy() |
| df["prev"] = df["value"].shift(1) |
| events = [] |
| |
| inversions = df[(df["value"] < 0) & (df["prev"] >= 0)].dropna(subset=["prev"]) |
| prev_date = None |
| for _, row in inversions.iterrows(): |
| if prev_date is not None and (row["date"] - prev_date).days < 60: |
| continue |
| events.append({ |
| "event_type": "yield_curve_3m10y_inversion", |
| "event_date": row["date"], |
| "event_description": ( |
| f"On {row['date'].date()}, the 10Y-3M yield curve inverted to " |
| f"{row['value']*100:.0f}bps — a classic recession warning signal." |
| ), |
| }) |
| prev_date = row["date"] |
| |
| un_inversions = df[(df["value"] >= 0) & (df["prev"] < 0)].dropna(subset=["prev"]) |
| prev_date = None |
| for _, row in un_inversions.iterrows(): |
| if prev_date is not None and (row["date"] - prev_date).days < 60: |
| continue |
| events.append({ |
| "event_type": "yield_curve_3m10y_uninversion", |
| "event_date": row["date"], |
| "event_description": ( |
| f"On {row['date'].date()}, the 10Y-3M yield curve un-inverted to " |
| f"{row['value']*100:.0f}bps after a period of inversion." |
| ), |
| }) |
| prev_date = row["date"] |
| return events |
|
|
|
|
| |
| |
| |
| |
| |
| |
| |
|
|
def _detect_fx_shocks(df: pd.DataFrame, pair_name: str) -> list[dict]:
    """Detect large moves in an FX pair."""
    window = config.SCENARIO_FX_ROLLING_WINDOW
    if len(df) < window:
        return []
    threshold = config.SCENARIO_FX_PCT_CHANGE
    work = df.copy()
    work["pct_change"] = work["value"].pct_change(periods=window)
    flagged = work[work["pct_change"].abs() >= threshold]
    if flagged.empty:
        return []
    events: list[dict] = []
    last_date = None
    for row in flagged.itertuples():
        if last_date is not None and (row.date - last_date).days < window:
            continue
        verb = "strengthened" if row.pct_change > 0 else "weakened"
        events.append({
            "event_type": "fx_shock",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, {pair_name} {verb} "
                f"{abs(row.pct_change)*100:.1f}% over {window} trading days "
                f"to {row.value:.4f}."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
def _detect_breakeven_inflation_shocks(df: pd.DataFrame, tenor: str) -> list[dict]:
    """Flag large moves in breakeven inflation rates for the given tenor."""
    label = f"the {tenor} breakeven inflation rate"
    return _detect_level_change(
        df,
        event_type="breakeven_inflation_shock",
        label=label,
        delta=config.SCENARIO_BEI_DELTA,
        window=config.SCENARIO_BEI_ROLLING_WINDOW,
    )
|
|
|
|
def _detect_djia_moves(df: pd.DataFrame) -> list[dict]:
    """Detect DJIA large moves over rolling window."""
    window = config.SCENARIO_DJIA_ROLLING_WINDOW
    if len(df) < window:
        return []
    threshold = config.SCENARIO_DJIA_PCT_CHANGE
    work = df.copy()
    work["pct_change"] = work["value"].pct_change(periods=window)
    flagged = work[work["pct_change"].abs() >= threshold]
    if flagged.empty:
        return []
    events: list[dict] = []
    last_date = None
    for row in flagged.itertuples():
        if last_date is not None and (row.date - last_date).days < window:
            continue
        verb = "rallied" if row.pct_change > 0 else "dropped"
        events.append({
            "event_type": "djia_move",
            "event_date": row.date,
            "event_description": (
                f"On {row.date.date()}, the DJIA {verb} "
                f"{abs(row.pct_change)*100:.1f}% over {window} trading days "
                f"to {row.value:.0f}."
            ),
        })
        last_date = row.date
    return events
|
|
|
|
def _detect_jolts_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect large month-over-month changes in JOLTS job openings."""
    return _detect_mom_change(
        df,
        "jolts_shock",
        "JOLTS job openings",
        config.SCENARIO_JOLTS_PCT_CHANGE,
        unit="K",
        fmt=".0f",
        de_dup_days=config.SCENARIO_JOLTS_DEDUP_DAYS,
    )
|
|
|
|
def _detect_earnings_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect large month-over-month changes in average hourly earnings."""
    return _detect_mom_change(
        df,
        "earnings_shock",
        "average hourly earnings",
        config.SCENARIO_EARNINGS_MOM_THRESHOLD,
        unit="$/hr",
        fmt=".2f",
    )
|
|
|
|
def _detect_vehicle_sales_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect large month-over-month changes in total vehicle sales."""
    return _detect_mom_change(
        df,
        "vehicle_sales_shock",
        "total vehicle sales",
        config.SCENARIO_VEHICLE_PCT_CHANGE,
        unit="M",
        fmt=".1f",
    )
|
|
|
|
def _detect_permit_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect large month-over-month changes in building permits."""
    return _detect_mom_change(
        df,
        "building_permit_shock",
        "building permits",
        config.SCENARIO_PERMIT_PCT_CHANGE,
        unit="K",
        fmt=".0f",
    )
|
|
|
|
def _detect_existing_home_sales_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect large month-over-month changes in existing home sales."""
    return _detect_mom_change(
        df,
        "existing_home_sales_shock",
        "existing home sales",
        config.SCENARIO_EXISTING_HOME_SALES_PCT,
        unit="K",
        fmt=".0f",
    )
|
|
|
|
def _detect_nfci_events(df: pd.DataFrame) -> list[dict]:
    """Detect Chicago Fed NFCI crossings of the tightness threshold.

    Emits an ``nfci_tightening`` event when the index crosses up through
    ``config.SCENARIO_NFCI_THRESHOLD`` (tighter-than-average conditions)
    and an ``nfci_loosening`` event when it crosses back down, keeping at
    most one event per 30 days in each direction independently.

    Parameters:
        df: (date, value) frame of the NFCI series, date-sorted.

    Returns:
        List of event dicts (event_type, event_date, event_description).
    """
    if df.empty:
        return []
    threshold = config.SCENARIO_NFCI_THRESHOLD
    df = df.copy()
    df["prev"] = df["value"].shift(1)
    # A crossing requires the previous observation on the other side of the
    # threshold; dropna discards the first row (prev is NaN).
    crossings_up = df[(df["value"] >= threshold) &
                      (df["prev"] < threshold)].dropna(subset=["prev"])
    crossings_down = df[(df["value"] < threshold) &
                        (df["prev"] >= threshold)].dropna(subset=["prev"])

    def _collect(crossings: pd.DataFrame, event_type: str, verb: str,
                 side: str, tone: str) -> list[dict]:
        # Shared de-dup loop: at most one crossing event per 30 days.
        out: list[dict] = []
        prev_date = None
        for _, row in crossings.iterrows():
            if prev_date is not None and (row["date"] - prev_date).days < 30:
                continue
            out.append({
                "event_type": event_type,
                "event_date": row["date"],
                "event_description": (
                    f"On {row['date'].date()}, the Chicago Fed NFCI {verb} to "
                    f"{row['value']:.3f}, crossing {side} {threshold:g} — signaling {tone} "
                    f"financial conditions."
                ),
            })
            prev_date = row["date"]
        return out

    # Fix: the description previously hard-coded "0" even though the
    # comparisons use the configurable threshold; format the actual
    # threshold instead (output is identical when the threshold is 0).
    events = _collect(crossings_up, "nfci_tightening", "rose", "above",
                      "tighter-than-average")
    events += _collect(crossings_down, "nfci_loosening", "fell", "below",
                       "easing")
    return events
|
|
|
|
def _detect_fed_balance_sheet_events(df: pd.DataFrame) -> list[dict]:
    """Detect large changes in Fed balance sheet (WALCL).

    Uses ``config.SCENARIO_FED_BS_PCT_CHANGE`` over
    ``config.SCENARIO_FED_BS_ROLLING_WINDOW`` observations.
    """
    window = config.SCENARIO_FED_BS_ROLLING_WINDOW
    threshold = config.SCENARIO_FED_BS_PCT_CHANGE
    if len(df) < window:
        return []
    work = df.copy()
    work["pct_change"] = work["value"].pct_change(periods=window)
    hits = work.dropna(subset=["pct_change"])
    hits = hits[hits["pct_change"].abs() >= threshold]
    # WALCL is a weekly series, so the de-dup gap is the window expressed
    # in calendar days.
    min_gap = window * 7
    events: list[dict] = []
    last_date = None
    for _, hit in hits.iterrows():
        if last_date is not None and (hit["date"] - last_date).days < min_gap:
            continue
        last_date = hit["date"]
        verb = "expanded" if hit["pct_change"] > 0 else "contracted"
        events.append({
            "event_type": "fed_balance_sheet",
            "event_date": hit["date"],
            "event_description": (
                f"On {hit['date'].date()}, the Fed balance sheet {verb} "
                f"{abs(hit['pct_change'])*100:.1f}% over {window} weeks "
                f"to ${hit['value']/1e6:.2f}T."
            ),
        })
    return events
|
|
|
|
def _detect_monetary_base_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect large month-over-month changes in the monetary base."""
    return _detect_mom_change(
        df,
        "monetary_base_shock",
        "the monetary base",
        config.SCENARIO_MONETARY_BASE_PCT,
        unit="B",
        fmt=".0f",
    )
|
|
|
|
def _detect_business_loan_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect large month-over-month changes in C&I loans."""
    return _detect_mom_change(
        df,
        "business_loan_shock",
        "C&I loans",
        config.SCENARIO_BUSLOANS_PCT_CHANGE,
        unit="B",
        fmt=".0f",
    )
|
|
|
|
def _detect_pce_inflation_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect large month-over-month changes in PCE price index."""
    return _detect_mom_change(
        df,
        "pce_inflation_shock",
        "PCE price index",
        config.SCENARIO_PCEPI_MOM_THRESHOLD,
        fmt=".2f",
    )
|
|
|
|
def _detect_sofr_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect large moves in SOFR rate."""
    return _detect_level_change(
        df,
        "sofr_shock",
        "the SOFR rate",
        config.SCENARIO_SOFR_DELTA,
        config.SCENARIO_SOFR_WINDOW,
    )
|
|
|
|
def _detect_wti_oil_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect WTI oil shocks from FRED daily data (DCOILWTICO).

    Flags moves of at least ``config.SCENARIO_OIL_PCT_CHANGE`` over the
    rolling ``config.SCENARIO_OIL_ROLLING_WINDOW`` trading days.
    """
    window = config.SCENARIO_OIL_ROLLING_WINDOW
    threshold = config.SCENARIO_OIL_PCT_CHANGE
    if len(df) < window:
        return []
    work = df.copy()
    work["pct_change"] = work["value"].pct_change(periods=window)
    hits = work[work["pct_change"].abs() >= threshold]
    events: list[dict] = []
    last_date = None
    for _, hit in hits.iterrows():
        # Keep at most one event per rolling window.
        if last_date is not None and (hit["date"] - last_date).days < window:
            continue
        last_date = hit["date"]
        verb = "surged" if hit["pct_change"] > 0 else "plunged"
        events.append({
            "event_type": "wti_oil_shock",
            "event_date": hit["date"],
            "event_description": (
                f"On {hit['date'].date()}, WTI crude oil {verb} "
                f"{abs(hit['pct_change'])*100:.1f}% over {window} trading days "
                f"to ${hit['value']:.2f}/bbl."
            ),
        })
    return events
|
|
|
|
def _detect_henry_hub_shocks(df: pd.DataFrame) -> list[dict]:
    """Detect Henry Hub natural gas shocks from FRED daily data (DHHNGSP).

    Reuses the oil rolling window (``SCENARIO_OIL_ROLLING_WINDOW``) but
    applies the gas-specific threshold ``SCENARIO_NATGAS_PCT_CHANGE``.
    """
    window = config.SCENARIO_OIL_ROLLING_WINDOW
    threshold = config.SCENARIO_NATGAS_PCT_CHANGE
    if len(df) < window:
        return []
    work = df.copy()
    work["pct_change"] = work["value"].pct_change(periods=window)
    hits = work[work["pct_change"].abs() >= threshold]
    events: list[dict] = []
    last_date = None
    for _, hit in hits.iterrows():
        # Keep at most one event per rolling window.
        if last_date is not None and (hit["date"] - last_date).days < window:
            continue
        last_date = hit["date"]
        verb = "surged" if hit["pct_change"] > 0 else "plunged"
        events.append({
            "event_type": "henry_hub_shock",
            "event_date": hit["date"],
            "event_description": (
                f"On {hit['date'].date()}, Henry Hub natural gas {verb} "
                f"{abs(hit['pct_change'])*100:.1f}% over {window} trading days "
                f"to ${hit['value']:.2f}/MMBtu."
            ),
        })
    return events
|
|
|
|
| |
| |
| |
|
|
def _detect_real_yield_shocks(dgs10: pd.DataFrame, bei: pd.DataFrame) -> list[dict]:
    """Detect real yield (DGS10 - T10YIE) large moves.

    Inner-joins nominal yield and breakeven series on date, then flags
    changes of at least ``config.SCENARIO_REAL_YIELD_DELTA`` over
    ``config.SCENARIO_REAL_YIELD_WINDOW`` observations.
    """
    if dgs10.empty or bei.empty:
        return []
    joined = pd.merge(dgs10, bei, on="date",
                      suffixes=("_nom", "_bei")).sort_values("date")
    if joined.empty:
        return []
    joined["real_yield"] = joined["value_nom"] - joined["value_bei"]
    window = config.SCENARIO_REAL_YIELD_WINDOW
    if len(joined) < window:
        return []
    # diff(window) == x - x.shift(window)
    joined["change"] = joined["real_yield"].diff(window)
    hits = joined.dropna(subset=["change"])
    hits = hits[hits["change"].abs() >= config.SCENARIO_REAL_YIELD_DELTA]
    events: list[dict] = []
    last_date = None
    for _, hit in hits.iterrows():
        if last_date is not None and (hit["date"] - last_date).days < window:
            continue
        last_date = hit["date"]
        verb = "surged" if hit["change"] > 0 else "plunged"
        events.append({
            "event_type": "real_yield_shock",
            "event_date": hit["date"],
            "event_description": (
                f"On {hit['date'].date()}, the real yield (10Y nominal - breakeven) "
                f"{verb} {abs(hit['change'])*100:.0f}bps over {window} days "
                f"to {hit['real_yield']:.2f}%."
            ),
        })
    return events
|
|
|
|
def _detect_credit_compression(hy: pd.DataFrame, ig: pd.DataFrame) -> list[dict]:
    """Detect credit compression/expansion (HY spread - IG spread).

    Inner-joins the two spread series on date and flags changes in the
    HY-IG gap of at least ``config.SCENARIO_CREDIT_COMPRESSION_DELTA``
    over ``config.SCENARIO_CREDIT_COMPRESSION_WINDOW`` observations.
    """
    if hy.empty or ig.empty:
        return []
    joined = pd.merge(hy, ig, on="date",
                      suffixes=("_hy", "_ig")).sort_values("date")
    if joined.empty:
        return []
    joined["gap"] = joined["value_hy"] - joined["value_ig"]
    window = config.SCENARIO_CREDIT_COMPRESSION_WINDOW
    if len(joined) < window:
        return []
    # diff(window) == x - x.shift(window)
    joined["change"] = joined["gap"].diff(window)
    hits = joined.dropna(subset=["change"])
    hits = hits[hits["change"].abs() >= config.SCENARIO_CREDIT_COMPRESSION_DELTA]
    events: list[dict] = []
    last_date = None
    for _, hit in hits.iterrows():
        if last_date is not None and (hit["date"] - last_date).days < window:
            continue
        last_date = hit["date"]
        verb = ("widened (risk aversion)" if hit["change"] > 0
                else "compressed (risk appetite)")
        events.append({
            "event_type": "credit_compression",
            "event_date": hit["date"],
            "event_description": (
                f"On {hit['date'].date()}, the HY-IG credit spread gap {verb} "
                f"by {abs(hit['change'])*100:.0f}bps over {window} days "
                f"to {hit['gap']*100:.0f}bps."
            ),
        })
    return events
|
|
|
|
def _detect_term_premium_shocks(dgs30: pd.DataFrame, dgs2: pd.DataFrame) -> list[dict]:
    """Detect term premium (DGS30 - DGS2) large moves.

    Inner-joins the 30Y and 2Y series on date and flags spread changes of
    at least ``config.SCENARIO_TERM_PREMIUM_DELTA`` over
    ``config.SCENARIO_TERM_PREMIUM_WINDOW`` observations.
    """
    if dgs30.empty or dgs2.empty:
        return []
    joined = pd.merge(dgs30, dgs2, on="date",
                      suffixes=("_30", "_2")).sort_values("date")
    if joined.empty:
        return []
    joined["spread"] = joined["value_30"] - joined["value_2"]
    window = config.SCENARIO_TERM_PREMIUM_WINDOW
    if len(joined) < window:
        return []
    # diff(window) == x - x.shift(window)
    joined["change"] = joined["spread"].diff(window)
    hits = joined.dropna(subset=["change"])
    hits = hits[hits["change"].abs() >= config.SCENARIO_TERM_PREMIUM_DELTA]
    events: list[dict] = []
    last_date = None
    for _, hit in hits.iterrows():
        if last_date is not None and (hit["date"] - last_date).days < window:
            continue
        last_date = hit["date"]
        verb = "steepened" if hit["change"] > 0 else "flattened"
        events.append({
            "event_type": "term_premium_shock",
            "event_date": hit["date"],
            "event_description": (
                f"On {hit['date'].date()}, the 30Y-2Y term premium {verb} "
                f"by {abs(hit['change'])*100:.0f}bps over {window} days "
                f"to {hit['spread']*100:.0f}bps "
                f"(30Y={hit['value_30']:.2f}%, 2Y={hit['value_2']:.2f}%)."
            ),
        })
    return events
|
|
|
|
| |
| |
| |
|
|
def _detect_short_term_shocks(df: pd.DataFrame, event_type: str, label: str,
                              pct_threshold: float, window: int,
                              unit: str = "", fmt: str = ".0f") -> list[dict]:
    """Generic short-term percentage shock detector.

    Flags moves of at least ``pct_threshold`` (absolute fraction) over
    ``window`` trading days; events are de-duplicated to one per two
    windows (in calendar days).
    """
    if len(df) < window:
        return []
    work = df.copy()
    work["pct_change"] = work["value"].pct_change(periods=window)
    hits = work[work["pct_change"].abs() >= pct_threshold]
    events: list[dict] = []
    last_date = None
    min_gap = window * 2
    for _, hit in hits.iterrows():
        if last_date is not None and (hit["date"] - last_date).days < min_gap:
            continue
        last_date = hit["date"]
        verb = "surged" if hit["pct_change"] > 0 else "plunged"
        events.append({
            "event_type": event_type,
            "event_date": hit["date"],
            "event_description": (
                f"On {hit['date'].date()}, {label} {verb} "
                f"{abs(hit['pct_change'])*100:.1f}% over just {window} trading days "
                f"to {hit['value']:{fmt}}{unit} — an acute short-term shock."
            ),
        })
    return events
|
|
|
|
def _detect_short_term_level_shocks(df: pd.DataFrame, event_type: str, label: str,
                                    delta: float, window: int,
                                    unit: str = "%") -> list[dict]:
    """Generic short-term absolute-level shock detector.

    Flags absolute level changes of at least ``delta`` over ``window``
    trading days; events are de-duplicated to one per two windows.
    """
    if len(df) < window:
        return []
    work = df.copy()
    # diff(window) == value - value.shift(window)
    work["change"] = work["value"].diff(window)
    hits = work.dropna(subset=["change"])
    hits = hits[hits["change"].abs() >= delta]
    events: list[dict] = []
    last_date = None
    min_gap = window * 2
    for _, hit in hits.iterrows():
        if last_date is not None and (hit["date"] - last_date).days < min_gap:
            continue
        last_date = hit["date"]
        verb = "surged" if hit["change"] > 0 else "plunged"
        events.append({
            "event_type": event_type,
            "event_date": hit["date"],
            "event_description": (
                f"On {hit['date'].date()}, {label} {verb} "
                f"{abs(hit['change'])*100:.0f}bps over just {window} trading days "
                f"to {hit['value']:.2f}{unit} — a rapid rate move."
            ),
        })
    return events
|
|
|
|
| |
| |
| |
|
|
def run(granularity: str | None = None) -> pd.DataFrame:
    """Detect all scenario events and save to benchmark directory.

    Loads each raw series from ``data/macro/``, runs the matching detector,
    pools all events, assembles them into a scenario table, filters to the
    valid panel window, and writes
    ``data/benchmark/{granularity}/scenarios.parquet``.

    Parameters:
        granularity: Output subdirectory name; defaults to
            ``config.GRANULARITY``. Event dates themselves are calendar
            based and granularity-independent.

    Returns:
        The scenarios DataFrame with columns ``scenario_id``,
        ``event_type``, ``event_date``, ``event_description``,
        ``pre_window_start``, ``post_window_end``.
    """
    if granularity is None:
        granularity = config.GRANULARITY

    out_dir = config.DATA_DIR / "benchmark" / granularity
    out_dir.mkdir(parents=True, exist_ok=True)

    all_events: list[dict] = []

    # --- Rates: Fed funds ---
    fed = _load_fred("FEDFUNDS")
    fed_events = _detect_fed_rate_changes(fed)
    all_events.extend(fed_events)
    logger.info("Fed rate changes: %d events.", len(fed_events))

    # --- Volatility: VIX ---
    vix = _load_fred("VIXCLS")
    vix_events = _detect_vix_spikes(vix)
    all_events.extend(vix_events)
    logger.info("VIX spikes: %d events.", len(vix_events))

    # --- Commodities: EIA spot series (optional inputs) ---
    crude = _load_crude_spot()
    if not crude.empty:
        oil_events = _detect_oil_shocks(crude)
        all_events.extend(oil_events)
        logger.info("Oil shocks: %d events.", len(oil_events))
    else:
        logger.warning("No crude oil spot data available; skipping oil shock detection.")

    natgas = _load_natgas_spot()
    if not natgas.empty:
        ng_events = _detect_natgas_shocks(natgas)
        all_events.extend(ng_events)
        logger.info("Natural gas shocks: %d events.", len(ng_events))
    else:
        logger.warning("No natural gas spot data available; skipping.")

    # --- Equities ---
    sp500 = _load_fred("SP500")
    dd_events = _detect_market_drawdowns(sp500)
    all_events.extend(dd_events)
    logger.info("Market drawdowns: %d events.", len(dd_events))

    nasdaq = _load_fred("NASDAQCOM")
    nasdaq_events = _detect_nasdaq_moves(nasdaq)
    all_events.extend(nasdaq_events)
    logger.info("NASDAQ moves: %d events.", len(nasdaq_events))

    # --- Treasury yields and curve (dgs10/dgs2/dgs30 reused below) ---
    dgs10 = _load_fred("DGS10")
    dgs2 = _load_fred("DGS2")
    yc_events = _detect_yield_curve_events(dgs10, dgs2)
    all_events.extend(yc_events)
    logger.info("Yield curve events: %d events.", len(yc_events))

    tr_events = _detect_treasury_rate_shocks(dgs10)
    all_events.extend(tr_events)
    logger.info("Treasury rate shocks: %d events.", len(tr_events))

    # --- FX: broad dollar index ---
    usd = _load_fred("DTWEXBGS")
    usd_events = _detect_usd_shocks(usd)
    all_events.extend(usd_events)
    logger.info("USD shocks: %d events.", len(usd_events))

    dgs30 = _load_fred("DGS30")
    if not dgs30.empty:
        dgs30_events = _detect_dgs30_shocks(dgs30)
        all_events.extend(dgs30_events)
        logger.info("30Y Treasury shocks: %d events.", len(dgs30_events))

    # --- Inflation ---
    cpi = _load_fred("CPIAUCSL")
    if not cpi.empty:
        cpi_events = _detect_cpi_shocks(cpi)
        all_events.extend(cpi_events)
        logger.info("CPI inflation shocks: %d events.", len(cpi_events))

    ppi = _load_fred("PPIACO")
    if not ppi.empty:
        ppi_events = _detect_ppi_shocks(ppi)
        all_events.extend(ppi_events)
        logger.info("PPI shocks: %d events.", len(ppi_events))

    # --- Labor market ---
    unrate = _load_fred("UNRATE")
    if not unrate.empty:
        un_events = _detect_unemployment_shocks(unrate)
        all_events.extend(un_events)
        logger.info("Unemployment shocks: %d events.", len(un_events))

    icsa = _load_fred("ICSA")
    if not icsa.empty:
        icsa_events = _detect_jobless_claims_spikes(icsa)
        all_events.extend(icsa_events)
        logger.info("Jobless claims spikes: %d events.", len(icsa_events))

    payems = _load_fred("PAYEMS")
    if not payems.empty:
        pay_events = _detect_payroll_shocks(payems)
        all_events.extend(pay_events)
        logger.info("Payroll shocks: %d events.", len(pay_events))

    # --- Credit spreads and stress indices (hy/ig reused below) ---
    hy = _load_fred("BAMLH0A0HYM2")
    if not hy.empty:
        hy_events = _detect_hy_spread_events(hy)
        all_events.extend(hy_events)
        logger.info("HY spread events: %d events.", len(hy_events))

    ig = _load_fred("BAMLC0A0CM")
    if not ig.empty:
        ig_events = _detect_ig_spread_events(ig)
        all_events.extend(ig_events)
        logger.info("IG spread events: %d events.", len(ig_events))

    ted = _load_fred("TEDRATE")
    if not ted.empty:
        ted_events = _detect_ted_spread_spikes(ted)
        all_events.extend(ted_events)
        logger.info("TED spread spikes: %d events.", len(ted_events))

    fsi = _load_fred("STLFSI2")
    if not fsi.empty:
        fsi_events = _detect_financial_stress(fsi)
        all_events.extend(fsi_events)
        logger.info("Financial stress events: %d events.", len(fsi_events))

    # --- Housing, sentiment, activity ---
    mort = _load_fred("MORTGAGE30US")
    if not mort.empty:
        mort_events = _detect_mortgage_rate_shocks(mort)
        all_events.extend(mort_events)
        logger.info("Mortgage rate shocks: %d events.", len(mort_events))

    sent = _load_fred("UMCSENT")
    if not sent.empty:
        sent_events = _detect_sentiment_shocks(sent)
        all_events.extend(sent_events)
        logger.info("Sentiment shocks: %d events.", len(sent_events))

    indpro = _load_fred("INDPRO")
    if not indpro.empty:
        ip_events = _detect_industrial_production_shocks(indpro)
        all_events.extend(ip_events)
        logger.info("Industrial production shocks: %d events.", len(ip_events))

    retail = _load_fred("RSAFS")
    if not retail.empty:
        rs_events = _detect_retail_sales_shocks(retail)
        all_events.extend(rs_events)
        logger.info("Retail sales shocks: %d events.", len(rs_events))

    houst = _load_fred("HOUST")
    if not houst.empty:
        hs_events = _detect_housing_starts_shocks(houst)
        all_events.extend(hs_events)
        logger.info("Housing starts shocks: %d events.", len(hs_events))

    cshpi = _load_fred("CSUSHPISA")
    if not cshpi.empty:
        hp_events = _detect_home_price_events(cshpi)
        all_events.extend(hp_events)
        logger.info("Home price events: %d events.", len(hp_events))

    # --- Money supply ---
    m2 = _load_fred("M2SL")
    if not m2.empty:
        m2_events = _detect_m2_events(m2)
        all_events.extend(m2_events)
        logger.info("M2 money supply events: %d events.", len(m2_events))

    # --- Cross-asset composites using series loaded above ---
    if not sp500.empty and not nasdaq.empty:
        div_events = _detect_sp_nasdaq_divergence(sp500, nasdaq)
        all_events.extend(div_events)
        logger.info("Sector rotation events: %d events.", len(div_events))

    if not vix.empty:
        regime_events = _detect_vix_regime_change(vix)
        all_events.extend(regime_events)
        logger.info("Volatility regime events: %d events.", len(regime_events))

    t10y3m = _load_fred("T10Y3M")
    if not t10y3m.empty:
        yc3m_events = _detect_yield_curve_3m10y(t10y3m)
        all_events.extend(yc3m_events)
        logger.info("Yield curve 3M-10Y events: %d events.", len(yc3m_events))

    djia = _load_fred("DJIA")
    if not djia.empty:
        djia_events = _detect_djia_moves(djia)
        all_events.extend(djia_events)
        logger.info("DJIA moves: %d events.", len(djia_events))

    # --- Commodities from FRED daily series (wti reused below) ---
    wti = _load_fred("DCOILWTICO")
    if not wti.empty:
        wti_events = _detect_wti_oil_shocks(wti)
        all_events.extend(wti_events)
        logger.info("WTI oil shocks (FRED): %d events.", len(wti_events))

    hh = _load_fred("DHHNGSP")
    if not hh.empty:
        hh_events = _detect_henry_hub_shocks(hh)
        all_events.extend(hh_events)
        logger.info("Henry Hub gas shocks (FRED): %d events.", len(hh_events))

    # --- FX pairs ---
    for series_id, pair_name in [
        ("DEXUSEU", "USD/EUR"), ("DEXJPUS", "JPY/USD"),
        ("DEXUSUK", "USD/GBP"), ("DEXCHUS", "CNY/USD"),
    ]:
        fx = _load_fred(series_id)
        if not fx.empty:
            fx_events = _detect_fx_shocks(fx, pair_name)
            all_events.extend(fx_events)
            logger.info("FX shocks (%s): %d events.", pair_name, len(fx_events))

    # --- Breakeven inflation ---
    for series_id, tenor in [("T10YIE", "10-year"), ("T5YIE", "5-year")]:
        bei = _load_fred(series_id)
        if not bei.empty:
            bei_events = _detect_breakeven_inflation_shocks(bei, tenor)
            all_events.extend(bei_events)
            logger.info("Breakeven inflation (%s): %d events.", tenor, len(bei_events))

    pcepi = _load_fred("PCEPI")
    if not pcepi.empty:
        pce_events = _detect_pce_inflation_shocks(pcepi)
        all_events.extend(pce_events)
        logger.info("PCE inflation shocks: %d events.", len(pce_events))

    sofr = _load_fred("SOFR")
    if not sofr.empty:
        sofr_events = _detect_sofr_shocks(sofr)
        all_events.extend(sofr_events)
        logger.info("SOFR shocks: %d events.", len(sofr_events))

    jolts = _load_fred("JTSJOL")
    if not jolts.empty:
        jolts_events = _detect_jolts_shocks(jolts)
        all_events.extend(jolts_events)
        logger.info("JOLTS shocks: %d events.", len(jolts_events))

    earnings = _load_fred("CES0500000003")
    if not earnings.empty:
        earn_events = _detect_earnings_shocks(earnings)
        all_events.extend(earn_events)
        logger.info("Earnings shocks: %d events.", len(earn_events))

    vehicles = _load_fred("TOTALSA")
    if not vehicles.empty:
        veh_events = _detect_vehicle_sales_shocks(vehicles)
        all_events.extend(veh_events)
        logger.info("Vehicle sales shocks: %d events.", len(veh_events))

    permits = _load_fred("PERMIT")
    if not permits.empty:
        perm_events = _detect_permit_shocks(permits)
        all_events.extend(perm_events)
        logger.info("Building permit shocks: %d events.", len(perm_events))

    ehs = _load_fred("EXHOSLUSM495S")
    if not ehs.empty:
        ehs_events = _detect_existing_home_sales_shocks(ehs)
        all_events.extend(ehs_events)
        logger.info("Existing home sales shocks: %d events.", len(ehs_events))

    nfci = _load_fred("NFCI")
    if not nfci.empty:
        nfci_events = _detect_nfci_events(nfci)
        all_events.extend(nfci_events)
        logger.info("NFCI events: %d events.", len(nfci_events))

    walcl = _load_fred("WALCL")
    if not walcl.empty:
        bs_events = _detect_fed_balance_sheet_events(walcl)
        all_events.extend(bs_events)
        logger.info("Fed balance sheet events: %d events.", len(bs_events))

    bogm = _load_fred("BOGMBASE")
    if not bogm.empty:
        bogm_events = _detect_monetary_base_shocks(bogm)
        all_events.extend(bogm_events)
        logger.info("Monetary base shocks: %d events.", len(bogm_events))

    busloans = _load_fred("BUSLOANS")
    if not busloans.empty:
        bl_events = _detect_business_loan_shocks(busloans)
        all_events.extend(bl_events)
        logger.info("Business loan shocks: %d events.", len(bl_events))

    # --- Composite signals from pairs of series ---
    bei_10y = _load_fred("T10YIE")
    if not dgs10.empty and not bei_10y.empty:
        ry_events = _detect_real_yield_shocks(dgs10, bei_10y)
        all_events.extend(ry_events)
        logger.info("Real yield shocks: %d events.", len(ry_events))

    if not hy.empty and not ig.empty:
        cc_events = _detect_credit_compression(hy, ig)
        all_events.extend(cc_events)
        logger.info("Credit compression events: %d events.", len(cc_events))

    if not dgs30.empty and not dgs2.empty:
        tp_events = _detect_term_premium_shocks(dgs30, dgs2)
        all_events.extend(tp_events)
        logger.info("Term premium shocks: %d events.", len(tp_events))

    # --- Short-window (acute) shocks on daily series ---
    if not sp500.empty:
        sp_short = _detect_short_term_shocks(
            sp500, "sp500_acute_shock", "the S&P 500",
            config.SCENARIO_SP500_SHORT_DRAWDOWN,
            config.SCENARIO_SP500_SHORT_WINDOW)
        all_events.extend(sp_short)
        logger.info("S&P 500 acute shocks (5d): %d events.", len(sp_short))

    if not nasdaq.empty:
        nq_short = _detect_short_term_shocks(
            nasdaq, "nasdaq_acute_shock", "the NASDAQ",
            config.SCENARIO_NASDAQ_SHORT_PCT,
            config.SCENARIO_NASDAQ_SHORT_WINDOW)
        all_events.extend(nq_short)
        logger.info("NASDAQ acute shocks (5d): %d events.", len(nq_short))

    if not wti.empty:
        oil_short = _detect_short_term_shocks(
            wti, "oil_acute_shock", "WTI crude oil",
            config.SCENARIO_OIL_SHORT_PCT,
            config.SCENARIO_OIL_SHORT_WINDOW,
            unit="$/bbl", fmt=".2f")
        all_events.extend(oil_short)
        logger.info("Oil acute shocks (5d): %d events.", len(oil_short))

    if not dgs10.empty:
        dgs10_short = _detect_short_term_level_shocks(
            dgs10, "treasury_acute_shock", "the 10Y Treasury yield",
            config.SCENARIO_DGS10_SHORT_DELTA,
            config.SCENARIO_DGS10_SHORT_WINDOW)
        all_events.extend(dgs10_short)
        logger.info("10Y Treasury acute shocks (5d): %d events.", len(dgs10_short))

    # --- Assemble the scenario table (stable sort keeps detector order for
    # same-date events, so scenario_ids are deterministic) ---
    if all_events:
        df = pd.DataFrame(all_events)
        df["event_date"] = pd.to_datetime(df["event_date"])
        df = df.sort_values("event_date").reset_index(drop=True)
        df["scenario_id"] = [f"sc_{i:04d}" for i in range(len(df))]
        df["pre_window_start"] = df["event_date"] - pd.Timedelta(days=config.SCENARIO_PRE_WINDOW_DAYS)
        df["post_window_end"] = df["event_date"] + pd.Timedelta(days=config.SCENARIO_POST_WINDOW_DAYS)
        df = df[["scenario_id", "event_type", "event_date", "event_description",
                 "pre_window_start", "post_window_end"]]
    else:
        df = pd.DataFrame(columns=[
            "scenario_id", "event_type", "event_date", "event_description",
            "pre_window_start", "post_window_end",
        ])

    # --- Keep only events whose pre/post windows fit inside the panel ---
    if not df.empty:
        panel_start = pd.Timestamp(config.START_DATE)
        panel_end = pd.Timestamp(config.END_DATE)
        # 35-day buffer on each side — presumably sized to cover
        # SCENARIO_PRE/POST_WINDOW_DAYS plus slack; confirm if those change.
        min_event = panel_start + pd.Timedelta(days=35)
        max_event = panel_end - pd.Timedelta(days=35)
        before = len(df)
        df = df[(df["event_date"] >= min_event) & (df["event_date"] <= max_event)].copy()
        # Re-number scenario ids after the filter so ids stay dense.
        df = df.sort_values("event_date").reset_index(drop=True)
        df["scenario_id"] = [f"sc_{i:04d}" for i in range(len(df))]
        dropped = before - len(df)
        if dropped > 0:
            # Fix: log the actual lower bound of the filter (min_event);
            # previously this reported panel_start, which was misleading.
            logger.info("Filtered %d scenarios outside valid panel window [%s, %s]",
                        dropped, min_event.date(), max_event.date())

    df.to_parquet(out_dir / "scenarios.parquet", index=False)
    logger.info("Saved %d scenario events -> %s", len(df), out_dir / "scenarios.parquet")
    return df
|
|