|
|
import yaml |
|
|
import os |
|
|
import logging |
|
|
import pandas as pd |
|
|
from logger import setup_logger |
|
|
|
|
|
|
|
|
from data_loader import DataLoader |
|
|
from pair_selector import PairSelector |
|
|
from kalman_hedge import KalmanHedge |
|
|
from signal_generator import SignalGenerator |
|
|
from backtester import Backtester |
|
|
from risk_engine import RiskEngine |
|
|
from portfolio_optimizer import PortfolioOptimizer |
|
|
|
|
|
|
|
|
# Module-wide logger, created via the project's setup_logger helper (logger.py).
logger = setup_logger("PairTradingStrategy")
|
|
|
|
|
def load_config(path: str) -> dict:
    """Load and parse a YAML configuration file.

    Args:
        path: Filesystem path to the YAML config file.

    Returns:
        The parsed configuration (a dict for a mapping-rooted YAML file).

    Raises:
        OSError: If the file cannot be opened.
        yaml.YAMLError: If the file is not valid YAML.
    """
    # Explicit UTF-8: the platform default encoding (e.g. cp1252 on
    # Windows) would break configs containing non-ASCII characters.
    with open(path, "r", encoding="utf-8") as f:
        cfg = yaml.safe_load(f)
    return cfg
|
|
|
|
|
def calc_portfolio_metrics(returns: pd.Series) -> dict:
    """Compute annualized performance statistics for a daily return series.

    Args:
        returns: Daily simple returns; NaNs are treated as zero return.

    Returns:
        Dict with keys ``annual_return``, ``annual_vol``, ``sharpe``
        (NaN when volatility is zero) and ``max_drawdown`` (<= 0).
        All values are NaN for an empty input.
    """
    returns = returns.fillna(0)
    # Guard: an empty series would otherwise divide by zero in the
    # annualization exponent (252 / len(returns)) below.
    if len(returns) == 0:
        nan = float("nan")
        return {
            "annual_return": nan,
            "annual_vol": nan,
            "sharpe": nan,
            "max_drawdown": nan,
        }
    # Geometric annualization assuming 252 trading days per year.
    ann_return = (1 + returns).prod() ** (252 / len(returns)) - 1
    ann_vol = returns.std() * (252 ** 0.5)
    # Sharpe ratio without a risk-free adjustment; NaN when vol is zero.
    sharpe = ann_return / ann_vol if ann_vol != 0 else float("nan")
    # Max drawdown: worst peak-to-trough decline of the cumulative curve.
    cum = (1 + returns).cumprod()
    peak = cum.cummax()
    drawdown = (cum - peak) / peak
    max_dd = drawdown.min()
    return {
        "annual_return": ann_return,
        "annual_vol": ann_vol,
        "sharpe": sharpe,
        "max_drawdown": max_dd,
    }
|
|
|
|
|
def main():
    """Run the full pair-trading backtest pipeline.

    Steps: load the YAML config, fetch price/volume data, select
    candidate pairs, run a Kalman-filter hedge + signal generation +
    backtest for each pair, then build a minimum-variance portfolio over
    the pairs with positive Sharpe and save risk/performance reports to
    the ``../output`` directory.
    """
    # --- Configuration (config.yaml sits one directory above this file) ---
    config_path = os.path.join(os.path.dirname(__file__), "../config.yaml")
    cfg = load_config(config_path)
    logger.info("Configuration loaded.")

    # --- Data: prices and volume for the configured ticker universe ---
    data_cfg = cfg["data"]
    dl = DataLoader(
        tickers=data_cfg["tickers"],
        start_date=data_cfg["start_date"],
        end_date=data_cfg["end_date"],
        interval=data_cfg["interval"]
    )
    prices, volume = dl.fetch_data()

    # --- Pair selection (cointegration-filtered; see PairSelector) ---
    ps_cfg = cfg["pair_selector"]
    pair_selector = PairSelector(
        prices=prices,
        cluster_size=ps_cfg["cluster_size"],
        coint_pval_threshold=ps_cfg["coint_pval_threshold"],
        rolling_window=ps_cfg["rolling_window"],
        rolling_step=ps_cfg["rolling_step"],
        min_valid_periods=ps_cfg["min_valid_periods"]
    )
    pairs_df = pair_selector.select_pairs()
    if pairs_df.empty:
        # Nothing to trade; bail out before building any downstream objects.
        logger.error("No suitable pairs found. Exiting.")
        return
    logger.info(f"Number of selected pairs: {len(pairs_df)}")

    # --- Per-pair pipeline: Kalman hedge -> signals -> backtest ---
    all_pair_returns = {}   # "T1/T2" label -> strategy return series
    results_summary = []    # one metrics row per pair for the summary CSV
    for idx, row in pairs_df.iterrows():
        t1 = row["ticker1"]
        t2 = row["ticker2"]
        logger.info(f"Processing pair {t1}-{t2}.")

        s1 = prices[t1]
        s2 = prices[t2]

        # Time-varying hedge ratio: s1 observed, s2 as the control input.
        km_cfg = cfg["kalman"]
        kh = KalmanHedge(
            observation_series=s1,
            control_series=s2,
            initial_state_cov=km_cfg["initial_state_cov"],
            transition_cov=km_cfg["transition_cov"],
            observation_cov=km_cfg["observation_cov"],
            em_iterations=km_cfg["em_iterations"]
        )
        kalman_df = kh.run_filter()

        # Trading signals from the Kalman output, aware of costs and
        # the pair's traded volume.
        sig_cfg = cfg["signal"]
        sg = SignalGenerator(
            price1=s1,
            price2=s2,
            kalman_df=kalman_df,
            config=sig_cfg
        )
        trade_df = sg.generate(costs=cfg["costs"], volume=volume[[t1, t2]])

        # Backtest the generated trades for this pair.
        bt = Backtester(
            trade_df=trade_df,
            costs=cfg["costs"],
            volume=volume[[t1, t2]],
            ticker1=t1,
            ticker2=t2,
        )
        bt_results = bt.run()
        metrics = bt.performance_metrics(bt_results)
        logger.info(f"Pair {t1}-{t2} metrics: {metrics}")

        # Keep the return stream for portfolio construction below.
        all_pair_returns[f"{t1}/{t2}"] = bt_results["strategy_return"]

        results_summary.append({
            "pair": f"{t1}/{t2}",
            **metrics,
            "half_life": row["half_life"]
        })

    # Align all pair return series into one frame; drop rows where no
    # pair has a return at all.
    pair_returns_df = (
        pd.DataFrame(all_pair_returns)
        .dropna(how="all")
    )

    # --- Persist the per-pair summary ---
    summary_df = pd.DataFrame(results_summary)
    output_dir = os.path.join(os.path.dirname(__file__), "../output")
    os.makedirs(output_dir, exist_ok=True)
    summary_path = os.path.join(output_dir, "pair_summary.csv")
    summary_df.to_csv(summary_path, index=False)
    logger.info(f"Saved pair summary to {summary_path}.")

    # --- Portfolio construction: only pairs with positive Sharpe ---
    # NOTE(review): this filter uses the full-sample Sharpe, which leaks
    # in-sample information into pair selection — acceptable for a
    # research backtest, worth flagging for live use.
    selected_pairs = summary_df[summary_df["sharpe"] > 0]["pair"].tolist()
    if not selected_pairs:
        logger.warning("No pairs with Sharpe > 0 were found. Portfolio will not be constructed.")
        return
    pair_returns_df_selected = pair_returns_df[selected_pairs]
    logger.info(f"Selected pairs with Sharpe > 0: {selected_pairs}")

    # Minimum-variance weights within the configured bounds.
    port_cfg = cfg["portfolio"]
    po = PortfolioOptimizer(
        pair_returns=pair_returns_df_selected,
        min_weight=port_cfg["min_weight"],
        max_weight=port_cfg["max_weight"]
    )
    weights = po.min_variance()

    # --- Portfolio risk: VaR (historical + parametric) and max drawdown ---
    portfolio_ret = (pair_returns_df_selected * weights).sum(axis=1)
    re = RiskEngine(returns=portfolio_ret, config=cfg["risk"])
    var_h = re.historical_var()
    var_p = re.parametric_var()
    max_dd = re.max_drawdown()
    logger.info(f"Portfolio VaR (hist) = {var_h:.4%}, (param) = {var_p:.4%}, max DD = {max_dd:.4%}")

    # --- Portfolio performance summary ---
    portfolio_metrics = calc_portfolio_metrics(portfolio_ret)
    logger.info(
        f"Portfolio annual_return={portfolio_metrics['annual_return']:.4%}, "
        f"annual_vol={portfolio_metrics['annual_vol']:.4%}, "
        f"sharpe={portfolio_metrics['sharpe']:.2f}, "
        f"max_drawdown={portfolio_metrics['max_drawdown']:.2%}"
    )

    # --- Persist weights and portfolio metrics ---
    weights_path = os.path.join(output_dir, "portfolio_weights.csv")
    weights.to_csv(weights_path, header=True)
    logger.info(f"Saved portfolio weights to {weights_path}.")

    pd.DataFrame([portfolio_metrics]).to_csv(
        os.path.join(output_dir, "portfolio_metrics.csv"),
        index=False
    )
    logger.info(f"Saved portfolio metrics to {os.path.join(output_dir, 'portfolio_metrics.csv')}.")

    logger.info("Backtest pipeline completed successfully.")
|
|
|
|
|
# Script entry point: run the full backtest pipeline when executed directly.
if __name__ == "__main__":
    main()
|
|
|