"""Step 3: Collect daily stock prices (OHLCV + Adj Close).
Downloads daily OHLCV data for the full ticker universe using
yf.download() in batches of PRICE_BATCH_SIZE.
Uses ``auto_adjust=False`` to preserve the ``Adj Close`` column,
which is required for the shares_outstanding derivation in Layer 2.
Includes retry with backoff for rate-limited batches, per-batch
checkpointing (so crashes don't lose all progress), and filters out
tickers/rows where ``Close`` is entirely NaN (junk/delisted symbols).
Output: data/prices/daily_prices.csv
"""
from __future__ import annotations
import logging
import os
import tempfile
import time
import pandas as pd
import yfinance as yf
from . import config
logger = logging.getLogger(__name__)
MAX_BATCH_RETRIES = 3
def _download_batch_with_retry(
    batch: list[str],
    start: str,
    end: str,
    retries: int = MAX_BATCH_RETRIES,
) -> pd.DataFrame | None:
    """Fetch daily OHLCV data for *batch*, retrying on rate-limit errors.

    Retries with exponential backoff (5s, 10s, 20s) when the error text
    looks like a rate limit; any other failure aborts immediately.

    Returns the raw yfinance frame, or ``None`` when the download fails,
    comes back empty, or every retry is exhausted.
    """
    attempt = 0
    while attempt < retries:
        try:
            frame = yf.download(
                batch,
                start=start,
                end=end,
                group_by="ticker",
                auto_adjust=False,
                threads=True,
            )
        except Exception as exc:
            message = str(exc)
            rate_limited = "Too Many Requests" in message or "Rate" in message
            if not rate_limited:
                # Non-retryable failure: give up on this batch.
                logger.warning("Batch download failed: %s", exc)
                return None
            wait = 5 * (2 ** attempt)  # 5s, 10s, 20s
            logger.warning("Batch rate-limited (attempt %d/%d), waiting %ds ...",
                attempt + 1, retries, wait)
            time.sleep(wait)
            attempt += 1
            continue
        if frame is not None and not frame.empty:
            return frame
        return None
    logger.warning("Batch exhausted %d retries.", retries)
    return None
def _reshape_batch(raw: pd.DataFrame, batch: list[str]) -> pd.DataFrame:
"""Reshape a yfinance MultiIndex batch to long format."""
records = []
if isinstance(raw.columns, pd.MultiIndex):
for ticker in raw.columns.get_level_values(0).unique():
try:
sub = raw[ticker].copy()
sub = sub.reset_index()
sub["Ticker"] = ticker
records.append(sub)
except Exception as exc:
logger.warning("Could not reshape ticker %s: %s", ticker, exc)
continue
else:
# Single ticker case
raw = raw.reset_index()
raw["Ticker"] = batch[0] if len(batch) == 1 else "UNKNOWN"
records.append(raw)
if not records:
return pd.DataFrame()
return pd.concat(records, ignore_index=True)
def _atomic_csv_write(df: pd.DataFrame, dest) -> None:
"""Write CSV atomically via temp file + rename."""
dest_parent = dest.parent if hasattr(dest, "parent") else os.path.dirname(dest)
fd, tmp_path = tempfile.mkstemp(suffix=".csv", dir=dest_parent)
try:
os.close(fd)
df.to_csv(tmp_path, index=False)
os.replace(tmp_path, str(dest))
except BaseException:
try:
os.unlink(tmp_path)
except OSError:
pass
raise
def run(tickers: list[str] | None = None) -> pd.DataFrame:
    """Execute Step 3 and return the daily prices DataFrame."""
    # Output and checkpoint both live under the configured prices dir.
    config.PRICES_DIR.mkdir(parents=True, exist_ok=True)
    out_path = config.PRICES_DIR / "daily_prices.csv"
    checkpoint_path = config.PRICES_DIR / "_prices_checkpoint.csv"
    # Idempotence: if the final output already exists, load and return it
    # instead of re-downloading.
    if out_path.exists():
        logger.info("Daily prices file already exists at %s, loading.", out_path)
        return pd.read_csv(out_path, parse_dates=["Date"])
    # Default ticker universe comes from Step 1's output file.
    if tickers is None:
        universe_path = config.UNIVERSE_DIR / "benchmark_universe.csv"
        if not universe_path.exists():
            raise FileNotFoundError(f"Run Step 1 first: {universe_path}")
        tickers = pd.read_csv(universe_path)["ticker"].tolist()
    # Resume from checkpoint if it exists
    existing = pd.DataFrame()
    already_done: set[str] = set()
    if checkpoint_path.exists():
        try:
            existing = pd.read_csv(checkpoint_path)
            already_done = set(existing["Ticker"].unique())
            logger.info("Resuming from checkpoint: %d tickers already downloaded.", len(already_done))
        except Exception:
            # An unreadable checkpoint is discarded rather than fatal.
            logger.warning("Checkpoint file corrupt, starting fresh.")
            existing = pd.DataFrame()
    # Only download tickers not already present in the checkpoint.
    remaining = [t for t in tickers if t not in already_done]
    logger.info("Downloading daily prices for %d tickers (%d already done) ...",
        len(remaining), len(already_done))
    batch_size = config.PRICE_BATCH_SIZE
    batches_since_checkpoint = 0
    for i in range(0, len(remaining), batch_size):
        batch = remaining[i : i + batch_size]
        logger.info("Downloading batch %d-%d / %d remaining", i, i + len(batch), len(remaining))
        raw = _download_batch_with_retry(batch, config.START_DATE, config.END_DATE)
        # Failed batches (None) are simply skipped; they will be retried
        # on a later run because their tickers never enter the checkpoint.
        if raw is not None:
            reshaped = _reshape_batch(raw, batch)
            if not reshaped.empty:
                existing = pd.concat([existing, reshaped], ignore_index=True)
                batches_since_checkpoint += 1
        # Checkpoint every 5 batches (~250 tickers)
        if batches_since_checkpoint >= 5 and not existing.empty:
            _atomic_csv_write(existing, checkpoint_path)
            batches_since_checkpoint = 0
            logger.info(" Checkpoint saved (%d rows, %d tickers).",
                len(existing), existing["Ticker"].nunique())
    if existing.empty:
        logger.warning("No price data downloaded.")
        return pd.DataFrame()
    # Standardize column names
    col_map = {c: c.strip() for c in existing.columns}
    result = existing.rename(columns=col_map)
    # Filter out rows where Close is NaN (junk/delisted tickers, pre-listing dates)
    before_len = len(result)
    result = result.dropna(subset=["Close"])
    dropped = before_len - len(result)
    if dropped > 0:
        logger.info("Filtered %d rows with NaN Close (kept %d).", dropped, len(result))
    # Report tickers with zero valid rows
    valid_tickers = result["Ticker"].nunique()
    all_tickers_set = set(tickers)
    tickers_in_result = set(result["Ticker"].unique())
    missing = all_tickers_set - tickers_in_result
    if missing:
        logger.info("%d tickers had no valid price data: %s",
            len(missing), sorted(missing))
    # Final output is written atomically so a crash mid-write cannot
    # leave a truncated daily_prices.csv behind.
    _atomic_csv_write(result, out_path)
    logger.info("Saved daily prices (%d rows, %d tickers) to %s",
        len(result), valid_tickers, out_path)
    # Clean up checkpoint
    if checkpoint_path.exists():
        checkpoint_path.unlink()
    return result