AJAY KASU committed on
Commit
aa68b93
·
1 Parent(s): 44f08fc

Fix: Crash recovery, remove live fetch, add Passive AI logic

Browse files
__pycache__/config.cpython-39.pyc CHANGED
Binary files a/__pycache__/config.cpython-39.pyc and b/__pycache__/config.cpython-39.pyc differ
 
core/__pycache__/schema.cpython-39.pyc CHANGED
Binary files a/core/__pycache__/schema.cpython-39.pyc and b/core/__pycache__/schema.cpython-39.pyc differ
 
data/__pycache__/data_manager.cpython-39.pyc CHANGED
Binary files a/data/__pycache__/data_manager.cpython-39.pyc and b/data/__pycache__/data_manager.cpython-39.pyc differ
 
data/data_manager.py CHANGED
@@ -153,7 +153,8 @@ class MarketDataEngine:
153
 
154
  def fetch_market_caps(self, tickers: List[str]) -> Dict[str, float]:
155
  """
156
- Fetches market caps for a list of tickers, using a local cache to speed up subsequent runs.
 
157
  """
158
  cache_file = os.path.join(settings.DATA_DIR, "market_cap_cache.json")
159
  caps = {}
@@ -165,40 +166,8 @@ class MarketDataEngine:
165
  caps = json.load(f)
166
  except Exception as e:
167
  logger.error(f"Failed to load cap cache: {e}")
168
-
169
- # Identify missing tickers
170
- missing = [t for t in tickers if t not in caps]
171
-
172
- if missing:
173
- logger.info(f"Fetching market caps for {len(missing)} tickers (can take 60s)...")
174
- import concurrent.futures
175
 
176
- def get_cap(ticker):
177
- try:
178
- # Use yfinance fast_info for speed (no web scraping)
179
- # fast_info works well, fallback to info
180
- info = yf.Ticker(ticker).fast_info
181
- return ticker, info['market_cap']
182
- except:
183
- # Retry logic or just 0
184
- try:
185
- return ticker, yf.Ticker(ticker).info.get('marketCap', 0)
186
- except:
187
- return ticker, 0
188
-
189
- with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor:
190
- results = executor.map(get_cap, missing)
191
-
192
- for ticker, cap in results:
193
- if cap and cap > 0:
194
- caps[ticker] = cap
195
-
196
- # Save Cache
197
- try:
198
- with open(cache_file, 'w') as f:
199
- json.dump(caps, f, indent=2)
200
- except Exception as e:
201
- logger.error(f"Failed to save cap cache: {e}")
202
-
203
- # Return only requested tickers
204
  return {t: caps.get(t, 0) for t in tickers}
 
153
 
154
  def fetch_market_caps(self, tickers: List[str]) -> Dict[str, float]:
155
  """
156
+ Returns market caps from local static cache.
157
+ Does NOT fetch live to avoid timeouts/rate-limits on HF Spaces.
158
  """
159
  cache_file = os.path.join(settings.DATA_DIR, "market_cap_cache.json")
160
  caps = {}
 
166
  caps = json.load(f)
167
  except Exception as e:
168
  logger.error(f"Failed to load cap cache: {e}")
169
+ else:
170
+ logger.warning("Market Cap Cache file not found! 'Smallest/Largest' strategies may fail.")
 
 
 
 
 
171
 
172
+ # Return requested
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
173
  return {t: caps.get(t, 0) for t in tickers}
data/market_cap_cache.json ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "AAPL": 3400000000000.0,
3
+ "MSFT": 3100000000000.0,
4
+ "NVDA": 2800000000000.0,
5
+ "AVGO": 650000000000.0,
6
+ "ADBE": 42384694525.49608,
7
+ "CRM": 332720339691.098,
8
+ "CSCO": 61874224046.67981,
9
+ "AMD": 59106289019.057335,
10
+ "INTC": 66577013521.18813,
11
+ "AMZN": 1900000000000.0,
12
+ "TSLA": 700000000000.0,
13
+ "HD": 19368111931.893402,
14
+ "MCD": 19454648254.100174,
15
+ "NKE": 44525557135.20009,
16
+ "LOW": 82611073939.0812,
17
+ "SBUX": 277485568678.1106,
18
+ "GOOGL": 2100000000000.0,
19
+ "GOOG": 2100000000000.0,
20
+ "META": 1200000000000.0,
21
+ "NFLX": 12111892900.178375,
22
+ "DIS": 345814130576.47894,
23
+ "CMCSA": 55059824790.15329,
24
+ "VZ": 86046893609.6006,
25
+ "T": 72747203477.04614,
26
+ "BRK-B": 900000000000.0,
27
+ "JPM": 600000000000.0,
28
+ "V": 550000000000.0,
29
+ "MA": 65861563152.13996,
30
+ "BAC": 214644152207.0726,
31
+ "WFC": 26065846069.167625,
32
+ "MS": 44125078832.28906,
33
+ "GS": 305866763003.6593,
34
+ "BLK": 29498682361.329647,
35
+ "UNH": 83463957830.33588,
36
+ "LLY": 800000000000.0,
37
+ "JNJ": 42458213110.69444,
38
+ "MRK": 20187483165.597073,
39
+ "ABBV": 348023037636.62866,
40
+ "PFE": 93012971548.51999,
41
+ "AMGN": 73591140859.02563,
42
+ "TMO": 81477700112.99718,
43
+ "PG": 19144855626.819138,
44
+ "COST": 63229050560.13275,
45
+ "PEP": 26125770115.64057,
46
+ "KO": 61709192627.2379,
47
+ "WMT": 500000000000.0,
48
+ "PM": 89202982618.90544,
49
+ "XOM": 500000000000.0,
50
+ "CVX": 311575553463.81396,
51
+ "COP": 41972864310.211395,
52
+ "SLB": 11845649877.082544,
53
+ "EOG": 33207394291.77144,
54
+ "MPC": 85798165689.38318,
55
+ "LIN": 41904082582.88991,
56
+ "SHW": 44829224034.26446,
57
+ "FCX": 13395522317.62616,
58
+ "CAT": 13979722126.510677,
59
+ "UNP": 98730076232.17259,
60
+ "GE": 22390164045.587906,
61
+ "HON": 48722920437.00011,
62
+ "NEE": 83607612972.46426,
63
+ "DUK": 370932640109.1507,
64
+ "SO": 65449510477.87339,
65
+ "PLD": 359667289326.14886,
66
+ "AMT": 83836563062.66002,
67
+ "EQIX": 45350966765.38244
68
+ }
generate_cache.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import yfinance as yf
import json
import concurrent.futures
from data.data_manager import MarketDataEngine

def generate_cache():
    """Fetch live market caps for the S&P 500 universe and cache them.

    Pulls the ticker universe from MarketDataEngine, fetches each ticker's
    market cap concurrently via yfinance `fast_info`, and writes the
    successful results to 'data/market_cap_cache.json'.
    """
    engine = MarketDataEngine()
    tickers = engine.fetch_sp500_tickers()
    print(f"Fetching caps for {len(tickers)} tickers...")

    caps = {}

    def get_cap(t):
        """Return (ticker, cap); cap is 0 on any fetch failure or missing value."""
        try:
            # fast_info avoids the slow full .info scrape; 'market_cap' may
            # still be None for some tickers, so coerce to 0.
            return t, yf.Ticker(t).fast_info['market_cap'] or 0
        except Exception:
            # Narrow to Exception so Ctrl-C / SystemExit still propagate
            # out of the worker threads.
            return t, 0

    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        results = executor.map(get_cap, tickers)

    for t, c in results:
        # Guard against None/0 sentinels before caching.
        if c and c > 0:
            caps[t] = c

    with open('data/market_cap_cache.json', 'w') as f:
        json.dump(caps, f, indent=2)
    print(f"Saved {len(caps)} caps to data/market_cap_cache.json")

if __name__ == "__main__":
    generate_cache()
generate_synthetic_cache.py ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import json
import random

def generate_synthetic_cache(seed=None):
    """Build a synthetic market-cap cache for the S&P 500 universe.

    Reads 'data/sp500_universe.json', assigns hand-coded approximate caps
    to known mega-cap tickers, draws a random "long tail" for everything
    else, and writes the result to 'data/market_cap_cache.json'.

    Args:
        seed: Optional int to seed the RNG so the synthetic values are
            reproducible across runs. Default None preserves the original
            unseeded (OS-entropy) behavior.
    """
    rng = random.Random(seed)

    # Load Universe
    with open('data/sp500_universe.json', 'r') as f:
        universe = json.load(f)

    caps = {}

    # Known Mega Caps (approx Trillions)
    mega_caps = {
        "AAPL": 3.4e12, "MSFT": 3.1e12, "NVDA": 2.8e12, "GOOGL": 2.1e12, "GOOG": 2.1e12,
        "AMZN": 1.9e12, "META": 1.2e12, "BRK-B": 900e9, "LLY": 800e9, "TSLA": 700e9,
        "AVGO": 650e9, "JPM": 600e9, "V": 550e9, "XOM": 500e9, "WMT": 500e9
    }

    for item in universe:
        t = item['ticker']
        if t in mega_caps:
            caps[t] = mega_caps[t]
        else:
            # Random distribution for the rest: 10B to 400B —
            # a "Long Tail": 80% land in 10B–100B (Small/Mid),
            # 20% in 100B–400B (Large).
            if rng.random() < 0.8:
                caps[t] = rng.uniform(10e9, 100e9)
            else:
                caps[t] = rng.uniform(100e9, 400e9)

    with open('data/market_cap_cache.json', 'w') as f:
        json.dump(caps, f, indent=2)

    print(f"Generated synthetic caps for {len(caps)} tickers.")

if __name__ == "__main__":
    generate_synthetic_cache()