"""event_study.py — Event-Driven Alpha & Event Study Analysis
Detects and analyzes market events (earnings, macro releases, M&A, FDA decisions)
to generate abnormal returns and event-driven trading signals.
References:
- MacKinlay 1997: "Event Studies in Economics and Finance"
- Engelberg et al. 2012: "Market Madness? The Case of Mad Money"
- Savor & Wilson 2014: "Asset Pricing: A Tale of Two Days"
"""
import numpy as np, pandas as pd
from scipy import stats
class EventStudyAnalyzer:
    """Event study analysis for earnings, macro, and corporate events.

    Implements the market-model event-study methodology of MacKinlay (1997):
    abnormal return AR_t = R_t - (alpha + beta * R_m,t), cumulated over a
    fixed event window around each event date.
    """

    def __init__(self, estimation_window=252, event_window=(-5, 20)):
        """
        Parameters
        ----------
        estimation_window : int
            Intended length of the market-model estimation window.
            NOTE(review): currently unused — the OLS fit below uses the full
            overlapping history; kept for API compatibility.
        event_window : tuple[int, int]
            (start, end) offsets in trading days relative to the event date,
            inclusive at both ends.
        """
        self.est_window = estimation_window
        self.ev_window = event_window

    def abnormal_returns(self, prices, market_prices, event_dates):
        """Compute abnormal returns and CAR statistics around event dates.

        Returns a DataFrame with one row per event date found in the return
        index; dates outside the data range are silently skipped.
        """
        ret = prices.pct_change().dropna()
        mkt_ret = market_prices.pct_change().dropna()
        # Market model: R_i = alpha + beta * R_m + epsilon, fit by OLS over
        # the dates common to both return series.
        common = ret.index.intersection(mkt_ret.index)
        y = ret.loc[common].values
        X = np.column_stack([np.ones(len(common)), mkt_ret.loc[common].values])
        beta = np.linalg.lstsq(X, y, rcond=None)[0]
        expected = beta[0] + beta[1] * mkt_ret
        # Dates with no market observation contribute their raw return as AR.
        abnormal = ret - expected.reindex(ret.index, fill_value=0)
        results = []
        for date in event_dates:
            try:
                idx = ret.index.get_loc(pd.Timestamp(date))
            except KeyError:
                # Event date not in the return index — skip it.
                continue
            start = max(0, idx + self.ev_window[0])
            end = min(len(ret), idx + self.ev_window[1] + 1)
            ar = abnormal.iloc[start:end]
            car = ar.cumsum()
            # Position of the event day inside the (possibly clipped) window.
            # The old `ev_window[0] * -1` offset was wrong whenever the
            # window was truncated at the start of the series.
            day0 = idx - start
            results.append({
                'event_date': date,
                'car_day0': float(ar.iloc[day0]) if 0 <= day0 < len(ar) else float('nan'),
                # CAR over the window relative to the first window day.
                'car_window': float(car.iloc[-1] - car.iloc[0]),
                'max_car': float(car.max()),
                'min_car': float(car.min()),
                'volatility_event': float(ar.std()),
                # Per-event t-stat of mean AR; epsilon guards zero std.
                't_stat': float(ar.mean() / (ar.std() + 1e-10) * np.sqrt(len(ar)))
            })
        return pd.DataFrame(results)

    def earnings_event_signal(self, prices, earnings_dates, surprise_threshold=0.05):
        """Generate a post-earnings-announcement-drift (PEAD) signal.

        The first daily return after the event date proxies the earnings
        surprise; when its magnitude exceeds ``surprise_threshold`` the
        signal follows its sign for the five trading days AFTER the
        reaction day (strictly post-reaction — no look-ahead).
        """
        ret = prices.pct_change().dropna()
        signals = pd.Series(0.0, index=prices.index)
        for ed in earnings_dates:
            try:
                # Locate the event in the *price* index so positional writes
                # into ``signals`` are aligned (ret is one element shorter,
                # which made the original .iloc positions off by one day).
                ev_pos = signals.index.get_loc(pd.Timestamp(ed))
            except KeyError:
                continue
            if ev_pos + 1 >= len(signals):
                continue
            reaction_day = signals.index[ev_pos + 1]
            if reaction_day not in ret.index:
                continue
            # 1-day reaction return as the surprise proxy.
            day1_ret = float(ret.loc[reaction_day])
            if abs(day1_ret) > surprise_threshold:
                # Hold for the 5 trading days following the reaction day.
                lo = ev_pos + 2
                hi = min(len(signals), ev_pos + 7)
                signals.iloc[lo:hi] = np.sign(day1_ret) * 0.2
        return signals

    def macro_event_signal(self, prices, macro_dates, volatility_threshold=0.015):
        """Signal around macro events (FOMC, NFP, CPI).

        Pre-event volatility compression followed by post-event expansion
        triggers a 5-day position in the direction of the event-day return.
        """
        ret = prices.pct_change().dropna()
        vol = ret.rolling(20).std()
        signals = pd.Series(0.0, index=prices.index)
        for md in macro_dates:
            try:
                idx = ret.index.get_loc(pd.Timestamp(md))
            except KeyError:
                continue
            if idx < 20 or idx + 5 >= len(ret):
                continue
            pre_vol = vol.iloc[idx - 5]
            post_vol = vol.iloc[idx:idx + 5].mean()
            if pre_vol < volatility_threshold and post_vol > pre_vol * 1.5:
                # Map return positions onto the price-indexed signal series:
                # the return at ret position k is dated prices.index[k + 1],
                # so shift by one (the original wrote one day too early).
                signals.iloc[idx + 1:idx + 6] = np.sign(ret.iloc[idx]) * 0.3
        return signals

    def merger_arbitrage_signal(self, acquirer_prices, target_prices, spread_threshold=0.02):
        """Merger arbitrage: long target above +threshold spread, short acquirer below -threshold."""
        # Deal spread proxy; pandas aligns the boolean masks on the index.
        spread = (target_prices / acquirer_prices - 1).dropna()
        signal = pd.Series(0.0, index=target_prices.index)
        signal[spread > spread_threshold] = 1.0   # Long target
        signal[spread < -spread_threshold] = -0.5  # Short acquirer
        return signal

    def event_calendar(self, ticker):
        """Generate a synthetic event calendar for a ticker.

        In production, this would call an API (EarningsWhispers, etc.);
        ``ticker`` is currently unused.
        """
        today = pd.Timestamp.now()
        return pd.DataFrame({
            'date': [today + pd.DateOffset(days=30), today + pd.DateOffset(days=60), today + pd.DateOffset(days=90)],
            'event': ['Q3 Earnings', 'FOMC Meeting', 'Q4 GDP Release'],
            'type': ['earnings', 'macro', 'macro'],
            'impact': ['high', 'high', 'medium']
        })

    def report(self, prices, market_prices, event_dates, event_type='earnings'):
        """Render a markdown event-study report with a CAR significance test."""
        ar = self.abnormal_returns(prices, market_prices, event_dates)
        if ar.empty:
            return "## Event Study\n\nNo valid events found in data range."
        mean_car = ar['car_window'].mean()
        # Cross-sectional t-test on window CARs; epsilon guards zero std.
        t_stat = mean_car / (ar['car_window'].std() + 1e-10) * np.sqrt(len(ar))
        p_val = 2 * (1 - stats.t.cdf(abs(t_stat), len(ar) - 1))
        return f"""## Event Study: {event_type.upper()}
| Statistic | Value |
|-----------|-------|
| Events analyzed | {len(ar)} |
| Mean CAR (window) | {mean_car*100:.2f}% |
| Median CAR | {ar['car_window'].median()*100:.2f}% |
| Day-0 abnormal return | {ar['car_day0'].mean()*100:.2f}% |
| t-statistic | {t_stat:.2f} |
| p-value | {p_val:.4f} |
| Significant? | {'✅ Yes' if p_val < 0.05 else '❌ No'} |
**Interpretation:** {'Positive' if mean_car > 0 else 'Negative'} post-event drift of {abs(mean_car)*100:.2f}% on average.
"""
if __name__ == '__main__':
    # Demo: synthetic GBM-like asset and benchmark, quarterly event dates.
    np.random.seed(42)
    trading_days = pd.date_range('2022-01-01', periods=500, freq='B')
    asset = pd.Series(np.cumprod(1 + np.random.normal(0.0005, 0.015, 500)),
                      index=trading_days)
    benchmark = pd.Series(np.cumprod(1 + np.random.normal(0.0003, 0.012, 500)),
                          index=trading_days)
    quarterly_events = ['2022-03-15', '2022-06-15', '2022-09-15', '2022-12-15']
    analyzer = EventStudyAnalyzer()
    print(analyzer.report(asset, benchmark, quarterly_events))