Dmitry Beresnev committed on
Commit
a4fc70c
Β·
1 Parent(s): 8e87652

fix data downloader

Browse files
Files changed (2) hide show
  1. portfolio_calculator.py +132 -49
  2. requirements.txt +2 -0
portfolio_calculator.py CHANGED
@@ -15,7 +15,14 @@ import numpy as np
15
  import pandas as pd
16
  import yfinance as yf
17
  import streamlit as st
18
- import time
 
 
 
 
 
 
 
19
 
20
 
21
  # Constants
@@ -24,6 +31,92 @@ MIN_DATA_POINTS = 30
24
  MAX_TICKERS = 20
25
 
26
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
27
  @st.cache_data(ttl=3600) # Cache for 1 hour
28
  def fetch_historical_data(
29
  tickers: Tuple[str, ...], # Tuple for hashability (caching requirement)
@@ -73,60 +166,50 @@ def fetch_historical_data(
73
 
74
  except Exception as e:
75
  # Log the error but continue to fallback strategy
76
- print(f"Batch download failed: {str(e)}, trying individual downloads...")
 
 
 
77
 
78
- # Strategy 2: Download one ticker at a time with retries
79
- st.info("Fetching data individually for each ticker (this may take a moment)...")
80
  individual_prices = {}
81
  failed_tickers = []
82
 
83
- for idx, ticker in enumerate(ticker_list):
84
- max_retries = 3
85
- retry_delay = 2 # seconds
86
-
87
- for attempt in range(max_retries):
88
- try:
89
- # Add delay between requests to avoid rate limiting (except first ticker)
90
- if idx > 0 or attempt > 0:
91
- time.sleep(retry_delay)
92
-
93
- # Use Ticker object for more reliable downloads
94
- ticker_obj = yf.Ticker(ticker)
95
-
96
- # Try different methods
97
- if attempt == 0:
98
- # Method 1: history with period
99
- hist = ticker_obj.history(period=period)
100
- elif attempt == 1:
101
- # Method 2: Try shorter period
102
- hist = ticker_obj.history(period="6mo")
103
- else:
104
- # Method 3: Try start/end dates
105
- import datetime
106
- end_date = datetime.datetime.now()
107
- start_date = end_date - datetime.timedelta(days=365)
108
- hist = ticker_obj.history(start=start_date, end=end_date)
109
-
110
- if not hist.empty:
111
- # Extract close prices
112
- individual_prices[ticker] = hist['Close']
113
- st.success(f"βœ… {ticker}: {len(hist)} days of data")
114
- break # Success, exit retry loop
115
- else:
116
- if attempt < max_retries - 1:
117
- st.warning(f"⚠️ {ticker}: No data, retrying... (attempt {attempt + 1}/{max_retries})")
118
  else:
119
- st.error(f"❌ {ticker}: No data after {max_retries} attempts")
120
- failed_tickers.append(ticker)
121
-
122
- except Exception as e:
123
- if attempt < max_retries - 1:
124
- st.warning(f"⚠️ {ticker}: Error, retrying... (attempt {attempt + 1}/{max_retries})")
125
- retry_delay *= 2 # Exponential backoff
126
- else:
127
  failed_tickers.append(ticker)
128
- st.error(f"❌ {ticker}: Failed after {max_retries} attempts")
129
- print(f"Failed to fetch {ticker}: {str(e)}")
 
 
 
 
130
 
131
  # Check if we got any data
132
  if not individual_prices:
 
15
  import pandas as pd
16
  import yfinance as yf
17
  import streamlit as st
18
+ from concurrent.futures import ProcessPoolExecutor, as_completed
19
+ import logging
20
+ from tenacity import retry, stop_after_attempt, wait_exponential, retry_if_exception_type
21
+ from ratelimit import limits, sleep_and_retry
22
+
23
+ # Configure logging
24
+ logging.basicConfig(level=logging.INFO)
25
+ logger = logging.getLogger(__name__)
26
 
27
 
28
  # Constants
 
31
  MAX_TICKERS = 20
32
 
33
 
# Rate limiter: max 5 calls per 10 seconds (conservative for Yahoo Finance).
# NOTE: decorator order matters. @retry must be OUTERMOST so that every
# retry attempt passes back through the rate limiter. In the previous order
# (@limits outside @retry) the limiter counted one call per outer invocation
# while up to three actual API requests went through un-throttled.
# NOTE(review): ratelimit keeps its state per process, so with N worker
# processes the effective global rate is N x this limit — confirm acceptable.
@retry(
    stop=stop_after_attempt(3),  # 3 attempts total (i.e. 2 retries)
    wait=wait_exponential(multiplier=1, min=2, max=60),  # backoff: 2s, 4s, 8s, ... capped at 60s
    retry=retry_if_exception_type((ConnectionError, TimeoutError)),  # only retry network errors
    reraise=True,  # surface the final error to the caller instead of a RetryError
)
@sleep_and_retry
@limits(calls=5, period=10)
def _fetch_with_retry(ticker_obj, method: str, period: str = "1y"):
    """
    Fetch historical data from a yfinance Ticker with rate limiting and retries.

    Args:
        ticker_obj: yfinance Ticker object
        method: Fetch strategy — 'period' uses the caller-supplied period,
            'short' uses a fixed 6-month window, anything else uses explicit
            start/end dates covering the last 365 days.
        period: Time period passed to history() when method == 'period'

    Returns:
        Historical data as returned by Ticker.history() (may be empty)

    Raises:
        ConnectionError / TimeoutError: re-raised once retries are exhausted
    """
    if method == 'period':
        return ticker_obj.history(period=period)
    if method == 'short':
        return ticker_obj.history(period="6mo")
    # 'dates': explicit one-year window ending now — most reliable fallback.
    import datetime
    end_date = datetime.datetime.now()
    start_date = end_date - datetime.timedelta(days=365)
    return ticker_obj.history(start=start_date, end=end_date)
+
65
+
66
+ def fetch_single_ticker(ticker: str, period: str = "1y") -> Tuple[str, Optional[pd.Series], Optional[str]]:
67
+ """
68
+ Fetch historical data for a single ticker with rate limiting and exponential backoff.
69
+
70
+ This function runs in a separate process for parallel execution.
71
+ Uses tenacity for exponential backoff and ratelimit for request throttling.
72
+
73
+ Args:
74
+ ticker: Ticker symbol
75
+ period: Time period for historical data
76
+
77
+ Returns:
78
+ Tuple of (ticker, price_series, error_message)
79
+ """
80
+ try:
81
+ # Create fresh Ticker object in this process
82
+ ticker_obj = yf.Ticker(ticker)
83
+
84
+ # Try Method 1: Standard period with retry logic
85
+ try:
86
+ hist = _fetch_with_retry(ticker_obj, 'period', period)
87
+ if not hist.empty:
88
+ logger.info(f"βœ… {ticker}: Fetched {len(hist)} days (method: period)")
89
+ return ticker, hist['Close'], None
90
+ except Exception as e:
91
+ logger.warning(f"⚠️ {ticker}: Method 1 failed - {str(e)}")
92
+
93
+ # Try Method 2: Shorter period
94
+ try:
95
+ hist = _fetch_with_retry(ticker_obj, 'short', period)
96
+ if not hist.empty:
97
+ logger.info(f"βœ… {ticker}: Fetched {len(hist)} days (method: short)")
98
+ return ticker, hist['Close'], None
99
+ except Exception as e:
100
+ logger.warning(f"⚠️ {ticker}: Method 2 failed - {str(e)}")
101
+
102
+ # Try Method 3: Explicit dates (most reliable)
103
+ try:
104
+ hist = _fetch_with_retry(ticker_obj, 'dates', period)
105
+ if not hist.empty:
106
+ logger.info(f"βœ… {ticker}: Fetched {len(hist)} days (method: dates)")
107
+ return ticker, hist['Close'], None
108
+ except Exception as e:
109
+ logger.warning(f"⚠️ {ticker}: Method 3 failed - {str(e)}")
110
+
111
+ # All methods failed
112
+ logger.error(f"❌ {ticker}: All methods exhausted")
113
+ return ticker, None, "No data available after all retry attempts"
114
+
115
+ except Exception as e:
116
+ logger.error(f"❌ {ticker}: Fatal error - {str(e)}")
117
+ return ticker, None, str(e)
118
+
119
+
120
  @st.cache_data(ttl=3600) # Cache for 1 hour
121
  def fetch_historical_data(
122
  tickers: Tuple[str, ...], # Tuple for hashability (caching requirement)
 
166
 
167
  except Exception as e:
168
  # Log the error but continue to fallback strategy
169
+ logger.warning(f"Batch download failed: {str(e)}, trying individual downloads...")
170
+
171
+ # Strategy 2: Parallel download using ProcessPoolExecutor
172
+ st.info(f"πŸ“₯ Fetching data for {len(ticker_list)} tickers in parallel...")
173
 
 
 
174
  individual_prices = {}
175
  failed_tickers = []
176
 
177
+ # Determine number of workers (max 4 to avoid overwhelming the API)
178
+ max_workers = min(len(ticker_list), 4)
179
+
180
+ try:
181
+ # Use ProcessPoolExecutor for true parallel execution
182
+ with ProcessPoolExecutor(max_workers=max_workers) as executor:
183
+ # Submit all ticker fetch jobs
184
+ future_to_ticker = {
185
+ executor.submit(fetch_single_ticker, ticker, period): ticker
186
+ for ticker in ticker_list
187
+ }
188
+
189
+ # Process results as they complete
190
+ completed = 0
191
+ for future in as_completed(future_to_ticker):
192
+ ticker = future_to_ticker[future]
193
+ completed += 1
194
+
195
+ try:
196
+ ticker_symbol, price_series, error = future.result(timeout=30)
197
+
198
+ if price_series is not None and not price_series.empty:
199
+ individual_prices[ticker_symbol] = price_series
200
+ st.success(f"βœ… {ticker_symbol}: {len(price_series)} days ({completed}/{len(ticker_list)})")
 
 
 
 
 
 
 
 
 
 
 
201
  else:
202
+ failed_tickers.append(ticker_symbol)
203
+ st.error(f"❌ {ticker_symbol}: {error or 'No data'} ({completed}/{len(ticker_list)})")
204
+
205
+ except Exception as e:
 
 
 
 
206
  failed_tickers.append(ticker)
207
+ st.error(f"❌ {ticker}: {str(e)} ({completed}/{len(ticker_list)})")
208
+
209
+ except Exception as e:
210
+ st.error(f"Parallel processing error: {str(e)}")
211
+ # Fall back to empty result
212
+ pass
213
 
214
  # Check if we got any data
215
  if not individual_prices:
requirements.txt CHANGED
@@ -6,3 +6,5 @@ pandas==2.2.0
6
  numpy==1.26.3
7
  sympy==1.12
8
  matplotlib==3.8.2
 
 
 
6
  numpy==1.26.3
7
  sympy==1.12
8
  matplotlib==3.8.2
9
+ tenacity==8.2.3
10
+ ratelimit==2.2.1