Riy777 committed on
Commit
879205b
·
verified ·
1 Parent(s): e765f32

Update backtest_engine.py

Browse files
Files changed (1) hide show
  1. backtest_engine.py +152 -108
backtest_engine.py CHANGED
@@ -1,5 +1,5 @@
1
  # ============================================================
2
- # 🧪 backtest_engine.py (V101.1 - GEM-Architect: Silent Mode)
3
  # ============================================================
4
 
5
  import asyncio
@@ -31,6 +31,7 @@ class HeavyDutyBacktester:
31
  def __init__(self, data_manager, processor):
32
  self.dm = data_manager
33
  self.proc = processor
 
34
  self.GRID_DENSITY = 6
35
  self.INITIAL_CAPITAL = 10.0
36
  self.TRADING_FEES = 0.001
@@ -52,17 +53,14 @@ class HeavyDutyBacktester:
52
  self.force_end_date = None
53
 
54
  if not os.path.exists(CACHE_DIR): os.makedirs(CACHE_DIR)
55
- print(f"🧪 [Backtest V101.1] Smart Adaptive Grid (Silent Hydra).")
56
 
57
  def set_date_range(self, start_str, end_str):
58
  self.force_start_date = start_str
59
  self.force_end_date = end_str
60
 
61
- # ... (Rest of fetch and vector logic unchanged) ...
62
- # ... Keeping standard methods to save space ...
63
-
64
  # ==============================================================
65
- # ⚡ FAST DATA DOWNLOADER (Same as before)
66
  # ==============================================================
67
  async def _fetch_all_data_fast(self, sym, start_ms, end_ms):
68
  print(f" ⚡ [Network] Downloading {sym}...", flush=True)
@@ -105,7 +103,17 @@ class HeavyDutyBacktester:
105
  print(f" ✅ Downloaded {len(unique_candles)} candles.", flush=True)
106
  return unique_candles
107
 
 
 
 
108
  def _calculate_indicators_vectorized(self, df):
 
 
 
 
 
 
 
109
  delta = df['close'].diff()
110
  gain = (delta.where(delta > 0, 0)).rolling(window=14).mean()
111
  loss = (-delta.where(delta < 0, 0)).rolling(window=14).mean()
@@ -114,16 +122,29 @@ class HeavyDutyBacktester:
114
  df['ema20'] = df['close'].ewm(span=20, adjust=False).mean()
115
  df['ema50'] = df['close'].ewm(span=50, adjust=False).mean()
116
 
 
117
  high_low = df['high'] - df['low']
118
  high_close = (df['high'] - df['close'].shift()).abs()
119
  low_close = (df['low'] - df['close'].shift()).abs()
120
  ranges = pd.concat([high_low, high_close, low_close], axis=1)
121
  true_range = ranges.max(axis=1)
122
  df['atr'] = true_range.rolling(14).mean()
 
 
 
 
 
 
123
  df['vol_ma20'] = df['volume'].rolling(window=20).mean()
 
 
 
124
  df.fillna(0, inplace=True)
125
  return df
126
 
 
 
 
127
  async def _process_data_in_memory(self, sym, candles, start_ms, end_ms):
128
  safe_sym = sym.replace('/', '_')
129
  period_suffix = f"{start_ms}_{end_ms}"
@@ -133,12 +154,10 @@ class HeavyDutyBacktester:
133
  print(f" 📂 [{sym}] Data Exists -> Skipping.")
134
  return
135
 
136
- print(f" ⚙️ [CPU] Analyzing {sym} (Silent Profiling)...", flush=True)
137
  t0 = time.time()
138
 
139
  df_1m = pd.DataFrame(candles, columns=['timestamp', 'open', 'high', 'low', 'close', 'volume'])
140
- cols = ['open', 'high', 'low', 'close', 'volume']
141
- df_1m[cols] = df_1m[cols].astype('float32')
142
  df_1m['datetime'] = pd.to_datetime(df_1m['timestamp'], unit='ms')
143
  df_1m.set_index('datetime', inplace=True)
144
  df_1m = df_1m.sort_index()
@@ -148,8 +167,21 @@ class HeavyDutyBacktester:
148
  time_indices = {}
149
  agg_dict = {'open': 'first', 'high': 'max', 'low': 'min', 'close': 'last', 'volume': 'sum'}
150
 
 
151
  frames['1m'] = df_1m.copy()
152
  frames['1m']['timestamp'] = frames['1m'].index.floor('1min').astype(np.int64) // 10**6
 
 
 
 
 
 
 
 
 
 
 
 
153
  col_order = ['timestamp', 'open', 'high', 'low', 'close', 'volume']
154
  numpy_frames['1m'] = frames['1m'][col_order].values
155
  time_indices['1m'] = frames['1m'].index
@@ -164,8 +196,8 @@ class HeavyDutyBacktester:
164
  time_indices[tf_str] = resampled.index
165
 
166
  ai_results = []
167
- valid_idx_5m = time_indices['5m']
168
 
 
169
  df_5m_aligned = frames['5m'].copy()
170
  df_1h_aligned = frames['1h'].reindex(frames['5m'].index, method='ffill')
171
  df_15m_aligned = frames['15m'].reindex(frames['5m'].index, method='ffill')
@@ -185,21 +217,23 @@ class HeavyDutyBacktester:
185
 
186
  rsi_oversold = (df_1h_aligned['rsi'] >= 20) & (df_1h_aligned['rsi'] <= 40)
187
  price_drop = change_4h <= -2.0
188
- body = (df_15m_aligned['close'] - df_15m_aligned['open']).abs()
189
- lower_wick = df_15m_aligned[['open', 'close']].min(axis=1) - df_15m_aligned['low']
190
- is_hammer = lower_wick > (body * 1.5)
191
  is_green = df_15m_aligned['close'] > df_15m_aligned['open']
192
- is_reversal = filters_pass & rsi_oversold & price_drop & (is_hammer | is_green)
193
-
194
- valid_mask = is_breakout | is_reversal
195
- valid_indices = df_5m_aligned[valid_mask].index
196
-
197
  start_dt = df_1m.index[0] + pd.Timedelta(minutes=500)
198
  final_valid_indices = [t for t in valid_indices if t >= start_dt]
199
 
200
- total_hits = len(final_valid_indices)
201
- print(f" 🎯 Found {total_hits} signals.", flush=True)
202
-
 
 
 
 
 
 
 
203
  for i, current_time in enumerate(final_valid_indices):
204
  idx_1m = time_indices['1m'].searchsorted(current_time, side='right') - 1
205
  idx_5m = time_indices['5m'].searchsorted(current_time, side='right') - 1
@@ -213,102 +247,115 @@ class HeavyDutyBacktester:
213
  ohlcv_1h = numpy_frames['1h'][idx_1h-60+1 : idx_1h+1].tolist()
214
  ohlcv_15m = numpy_frames['15m'][idx_15m-60+1 : idx_15m+1].tolist()
215
 
216
- logic_packet = {'symbol': sym, 'ohlcv_1h': ohlcv_1h, 'ohlcv_15m': ohlcv_15m, 'change_24h': 0.0}
217
- if len(ohlcv_1h) >= 24:
218
- p_now = ohlcv_1h[-1][4]; p_old = ohlcv_1h[-24][4]
219
- if p_old > 0: logic_packet['change_24h'] = ((p_now - p_old) / p_old) * 100
220
-
221
- logic_result = self.dm._apply_logic_tree(logic_packet)
222
- signal_type = logic_result.get('type', 'NONE')
223
- l1_score = logic_result.get('score', 0.0)
 
224
 
225
- if signal_type in ['BREAKOUT', 'REVERSAL']:
226
- ohlcv_data = {
227
- '1m': numpy_frames['1m'][idx_1m-500+1 : idx_1m+1].tolist(),
228
- '5m': numpy_frames['5m'][idx_5m-200+1 : idx_5m+1].tolist(),
229
- '15m': ohlcv_15m,
230
- '1h': ohlcv_1h,
231
- '4h': numpy_frames['4h'][idx_4h-100+1 : idx_4h+1].tolist(),
232
- '1d': numpy_frames['1d'][idx_1d-50+1 : idx_1d+1].tolist()
233
- }
234
- current_price = ohlcv_data['5m'][-1][4]
235
- real_titan = 0.5
236
-
237
- raw_data_for_proc = {'symbol': sym, 'ohlcv': ohlcv_data, 'current_price': current_price}
238
- try:
239
- proc_res = await self.proc.process_compound_signal(raw_data_for_proc)
240
- if proc_res: real_titan = proc_res.get('titan_score', 0.5)
241
- except: pass
242
-
243
- # 🔥 RISK PROFILING
244
- max_hydra_crash = 0.0
245
- max_hydra_giveback = 0.0
246
- max_legacy_v2 = 0.0
247
- max_legacy_v3 = 0.0
248
-
249
- hydra_crash_time = 0
250
- legacy_panic_time = 0
251
-
252
- trade_ctx = {
253
- 'entry_price': current_price, 'entry_time': str(current_time),
254
- 'volume_30m_usd': 1000000
255
- }
256
-
257
- future_limit = 240
258
- end_idx_1m = min(idx_1m + future_limit, len(numpy_frames['1m']) - 1)
259
- check_step = 10
260
- current_idx_1m = idx_1m
261
 
262
- while current_idx_1m < end_idx_1m:
263
- current_idx_1m += check_step
 
 
264
 
265
- future_1m_data = numpy_frames['1m'][current_idx_1m-1000+1 : current_idx_1m+1].tolist()
266
- future_5m_data = numpy_frames['5m'][idx_5m-300+1 : idx_5m+1].tolist()
267
- current_ts = int(numpy_frames['1m'][current_idx_1m][0])
268
 
269
- if self.proc.guardian_hydra:
270
- hydra_res = self.proc.guardian_hydra.analyze_position(sym, future_1m_data, future_5m_data, ohlcv_15m, trade_ctx)
271
- probs = hydra_res.get('probs', {})
272
- p_crash = probs.get('crash', 0.0)
273
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
274
  if p_crash > max_hydra_crash:
275
  max_hydra_crash = p_crash
276
  if p_crash > 0.6 and hydra_crash_time == 0: hydra_crash_time = current_ts
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
277
 
278
- if probs.get('giveback', 0) > max_hydra_giveback: max_hydra_giveback = probs.get('giveback', 0)
279
-
280
- if self.proc.guardian_legacy:
281
- legacy_res = self.proc.guardian_legacy.analyze_position(
282
- future_1m_data, future_5m_data, ohlcv_15m, current_price, volume_30m_usd=1000000
283
- )
284
- scores = legacy_res.get('scores', {})
285
- s_v2 = scores.get('v2', 0.0)
286
- s_v3 = scores.get('v3', 0.0)
287
-
288
- if s_v2 > max_legacy_v2:
289
- max_legacy_v2 = s_v2
290
- if s_v2 > 0.8 and legacy_panic_time == 0: legacy_panic_time = current_ts
291
-
292
- if s_v3 > max_legacy_v3: max_legacy_v3 = s_v3
293
-
294
- ts_aligned = int(current_time.timestamp() // 60) * 60 * 1000
295
- ai_results.append({
296
- 'timestamp': ts_aligned, 'symbol': sym, 'close': current_price,
297
- 'real_titan': real_titan, 'signal_type': signal_type, 'l1_score': l1_score,
298
- 'risk_hydra_crash': max_hydra_crash,
299
- 'time_hydra_crash': hydra_crash_time,
300
- 'risk_legacy_v2': max_legacy_v2,
301
- 'time_legacy_panic': legacy_panic_time
302
- })
303
 
304
  dt = time.time() - t0
305
  if ai_results:
306
  pd.DataFrame(ai_results).to_pickle(scores_file)
307
- print(f" 💾 [{sym}] Saved {len(ai_results)} profiled signals. ({dt:.1f}s)", flush=True)
308
  else:
309
  print(f" ⚠️ [{sym}] No signals.", flush=True)
310
 
311
- del numpy_frames, time_indices, df_1m, candles, frames
312
  gc.collect()
313
 
314
  async def generate_truth_data(self):
@@ -318,19 +365,15 @@ class HeavyDutyBacktester:
318
  start_time_ms = int(dt_start.timestamp() * 1000)
319
  end_time_ms = int(dt_end.timestamp() * 1000)
320
  print(f"\n🚜 [Phase 1] Processing Era: {self.force_start_date} -> {self.force_end_date}")
321
- else:
322
- return
323
 
324
  for sym in self.TARGET_COINS:
325
  try:
326
  candles = await self._fetch_all_data_fast(sym, start_time_ms, end_time_ms)
327
  if candles:
328
  await self._process_data_in_memory(sym, candles, start_time_ms, end_time_ms)
329
- else:
330
- print(f" ❌ Failed/Empty data for {sym}.", flush=True)
331
  except Exception as e:
332
  print(f" ❌ SKIP {sym}: {e}", flush=True)
333
- continue
334
  gc.collect()
335
 
336
  @staticmethod
@@ -353,6 +396,7 @@ class HeavyDutyBacktester:
353
  for config in combinations_batch:
354
  wallet = { "balance": initial_capital, "allocated": 0.0, "positions": {}, "trades_history": [] }
355
 
 
356
  w_titan = config['w_titan']; w_struct = config['w_struct']; entry_thresh = config['thresh']
357
  hydra_thresh = config['hydra_thresh']
358
  legacy_thresh = config['legacy_thresh']
@@ -565,7 +609,7 @@ class HeavyDutyBacktester:
565
  print(f" 🛡️ Guard: Hydra={best['config']['hydra_thresh']} | Legacy={best['config']['legacy_thresh']}")
566
  print("="*60)
567
  return best['config'], best
568
-
569
  async def run_strategic_optimization_task():
570
  print("\n🧪 [STRATEGIC BACKTEST] Smart Adaptive Grid Initiated...")
571
  r2 = R2Service()
 
1
  # ============================================================
2
+ # 🧪 backtest_engine.py (V104.0 - GEM-Architect: High Fidelity & Speed Injection)
3
  # ============================================================
4
 
5
  import asyncio
 
31
  def __init__(self, data_manager, processor):
32
  self.dm = data_manager
33
  self.proc = processor
34
+ # كثافة الشبكة للدخول
35
  self.GRID_DENSITY = 6
36
  self.INITIAL_CAPITAL = 10.0
37
  self.TRADING_FEES = 0.001
 
53
  self.force_end_date = None
54
 
55
  if not os.path.exists(CACHE_DIR): os.makedirs(CACHE_DIR)
56
+ print(f"🧪 [Backtest V104.0] High-Fidelity 1-Minute Scan (Speed Injected).")
57
 
58
  def set_date_range(self, start_str, end_str):
59
  self.force_start_date = start_str
60
  self.force_end_date = end_str
61
 
 
 
 
62
  # ==============================================================
63
+ # ⚡ FAST DATA DOWNLOADER
64
  # ==============================================================
65
  async def _fetch_all_data_fast(self, sym, start_ms, end_ms):
66
  print(f" ⚡ [Network] Downloading {sym}...", flush=True)
 
103
  print(f" ✅ Downloaded {len(unique_candles)} candles.", flush=True)
104
  return unique_candles
105
 
106
+ # ==============================================================
107
+ # 🏎️ VECTORIZED INDICATORS (Optimized for Hydra Inputs)
108
+ # ==============================================================
109
  def _calculate_indicators_vectorized(self, df):
110
+ # 1. Basic Setup
111
+ df['close'] = df['close'].astype(float)
112
+ df['high'] = df['high'].astype(float)
113
+ df['low'] = df['low'].astype(float)
114
+ df['volume'] = df['volume'].astype(float)
115
+
116
+ # 2. RSI & EMA
117
  delta = df['close'].diff()
118
  gain = (delta.where(delta > 0, 0)).rolling(window=14).mean()
119
  loss = (-delta.where(delta < 0, 0)).rolling(window=14).mean()
 
122
  df['ema20'] = df['close'].ewm(span=20, adjust=False).mean()
123
  df['ema50'] = df['close'].ewm(span=50, adjust=False).mean()
124
 
125
+ # 3. ATR
126
  high_low = df['high'] - df['low']
127
  high_close = (df['high'] - df['close'].shift()).abs()
128
  low_close = (df['low'] - df['close'].shift()).abs()
129
  ranges = pd.concat([high_low, high_close, low_close], axis=1)
130
  true_range = ranges.max(axis=1)
131
  df['atr'] = true_range.rolling(14).mean()
132
+
133
+ # 4. 🔥 Hydra Specifics (Pre-calc for Speed)
134
+ sma20 = df['close'].rolling(20).mean()
135
+ std20 = df['close'].rolling(20).std()
136
+ df['bb_width'] = ((sma20 + 2*std20) - (sma20 - 2*std20)) / sma20
137
+
138
  df['vol_ma20'] = df['volume'].rolling(window=20).mean()
139
+ df['vol_ma50'] = df['volume'].rolling(window=50).mean()
140
+ df['rel_vol'] = df['volume'] / (df['vol_ma50'] + 1e-9)
141
+
142
  df.fillna(0, inplace=True)
143
  return df
144
 
145
+ # ==============================================================
146
+ # 🧠 CPU PROCESSING (1-Minute Scan)
147
+ # ==============================================================
148
  async def _process_data_in_memory(self, sym, candles, start_ms, end_ms):
149
  safe_sym = sym.replace('/', '_')
150
  period_suffix = f"{start_ms}_{end_ms}"
 
154
  print(f" 📂 [{sym}] Data Exists -> Skipping.")
155
  return
156
 
157
+ print(f" ⚙️ [CPU] Analyzing {sym} (1-Min Fidelity)...", flush=True)
158
  t0 = time.time()
159
 
160
  df_1m = pd.DataFrame(candles, columns=['timestamp', 'open', 'high', 'low', 'close', 'volume'])
 
 
161
  df_1m['datetime'] = pd.to_datetime(df_1m['timestamp'], unit='ms')
162
  df_1m.set_index('datetime', inplace=True)
163
  df_1m = df_1m.sort_index()
 
167
  time_indices = {}
168
  agg_dict = {'open': 'first', 'high': 'max', 'low': 'min', 'close': 'last', 'volume': 'sum'}
169
 
170
+ # --- 1. Pre-Calculate EVERYTHING (Vectorized) ---
171
  frames['1m'] = df_1m.copy()
172
  frames['1m']['timestamp'] = frames['1m'].index.floor('1min').astype(np.int64) // 10**6
173
+ frames['1m'] = self._calculate_indicators_vectorized(frames['1m'])
174
+
175
+ # Store critical columns in Numpy for O(1) Access
176
+ fast_1m = {
177
+ 'close': frames['1m']['close'].values,
178
+ 'rsi': frames['1m']['rsi'].values,
179
+ 'atr': frames['1m']['atr'].values,
180
+ 'bb_width': frames['1m']['bb_width'].values,
181
+ 'rel_vol': frames['1m']['rel_vol'].values,
182
+ 'timestamp': frames['1m']['timestamp'].values
183
+ }
184
+
185
  col_order = ['timestamp', 'open', 'high', 'low', 'close', 'volume']
186
  numpy_frames['1m'] = frames['1m'][col_order].values
187
  time_indices['1m'] = frames['1m'].index
 
196
  time_indices[tf_str] = resampled.index
197
 
198
  ai_results = []
 
199
 
200
+ # --- L1 Logic (Filter) ---
201
  df_5m_aligned = frames['5m'].copy()
202
  df_1h_aligned = frames['1h'].reindex(frames['5m'].index, method='ffill')
203
  df_15m_aligned = frames['15m'].reindex(frames['5m'].index, method='ffill')
 
217
 
218
  rsi_oversold = (df_1h_aligned['rsi'] >= 20) & (df_1h_aligned['rsi'] <= 40)
219
  price_drop = change_4h <= -2.0
 
 
 
220
  is_green = df_15m_aligned['close'] > df_15m_aligned['open']
221
+ is_reversal = filters_pass & rsi_oversold & price_drop & is_green
222
+
223
+ valid_indices = df_5m_aligned[is_breakout | is_reversal].index
 
 
224
  start_dt = df_1m.index[0] + pd.Timedelta(minutes=500)
225
  final_valid_indices = [t for t in valid_indices if t >= start_dt]
226
 
227
+ print(f" 🎯 Found {len(final_valid_indices)} signals. Running 1-Min Simulations...", flush=True)
228
+
229
+ # --- Hydra Model Access ---
230
+ hydra_models = {}
231
+ hydra_cols = []
232
+ if self.proc.guardian_hydra and self.proc.guardian_hydra.initialized:
233
+ hydra_models = self.proc.guardian_hydra.models
234
+ hydra_cols = self.proc.guardian_hydra.feature_cols
235
+
236
+ # --- 3. The Main Loop (Every Minute Check) ---
237
  for i, current_time in enumerate(final_valid_indices):
238
  idx_1m = time_indices['1m'].searchsorted(current_time, side='right') - 1
239
  idx_5m = time_indices['5m'].searchsorted(current_time, side='right') - 1
 
247
  ohlcv_1h = numpy_frames['1h'][idx_1h-60+1 : idx_1h+1].tolist()
248
  ohlcv_15m = numpy_frames['15m'][idx_15m-60+1 : idx_15m+1].tolist()
249
 
250
+ # Logic Classification (Simplified)
251
+ sig_type = 'BREAKOUT' if is_breakout[current_time] else 'REVERSAL'
252
+ l1_score = 100.0 if sig_type == 'REVERSAL' else 20.0
253
+
254
+ # 🔥 RISK PROFILING (Every 1 Minute)
255
+ max_hydra_crash = 0.0
256
+ max_hydra_giveback = 0.0
257
+ max_legacy_v2 = 0.0
258
+ max_legacy_v3 = 0.0
259
 
260
+ hydra_crash_time = 0
261
+ legacy_panic_time = 0
262
+
263
+ entry_price = fast_1m['close'][idx_1m]
264
+ highest_price = entry_price
265
+
266
+ # Scan next 4 hours (240 minutes), STEP = 1 minute (Live System Fidelity)
267
+ future_limit = 240
268
+ end_idx_1m = min(idx_1m + future_limit, len(fast_1m['close']) - 1)
269
+
270
+ # Pre-lookup HTF RSIs to avoid heavy lookups every minute
271
+ rsi_5m_val = frames['5m']['rsi'].asof(current_time)
272
+ rsi_15m_val = frames['15m']['rsi'].asof(current_time)
273
+
274
+ # Loop minute by minute
275
+ for current_idx_1m in range(idx_1m + 1, end_idx_1m + 1):
276
+ curr_price = fast_1m['close'][current_idx_1m]
277
+ if curr_price > highest_price: highest_price = curr_price
278
+ current_ts = int(fast_1m['timestamp'][current_idx_1m])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
279
 
280
+ # 🐉 A. Hydra Injection (Every Minute)
281
+ if hydra_models:
282
+ atr_val = fast_1m['atr'][current_idx_1m]
283
+ sl_dist = 1.5 * atr_val if atr_val > 0 else entry_price * 0.015
284
 
285
+ pnl_r = (curr_price - entry_price) / sl_dist if sl_dist > 0 else 0
286
+ max_pnl_r = (highest_price - entry_price) / sl_dist if sl_dist > 0 else 0
 
287
 
288
+ row_dict = {
289
+ 'rsi_1m': fast_1m['rsi'][current_idx_1m],
290
+ 'rsi_5m': rsi_5m_val,
291
+ 'rsi_15m': rsi_15m_val,
292
+ 'bb_width': fast_1m['bb_width'][current_idx_1m],
293
+ 'rel_vol': fast_1m['rel_vol'][current_idx_1m],
294
+ 'dist_ema20_1h': 0.0,
295
+ 'atr_pct': atr_val / curr_price if curr_price > 0 else 0,
296
+ 'norm_pnl_r': pnl_r,
297
+ 'max_pnl_r': max_pnl_r,
298
+ 'dist_tp_atr': 0.0, 'dist_sl_atr': 0.0,
299
+ 'time_in_trade': (current_idx_1m - idx_1m),
300
+ 'entry_type': 0.0, 'oracle_conf': 0.8, 'l2_score': 0.7, 'target_class': 3.0
301
+ }
302
+
303
+ vec = [row_dict.get(c, 0.0) for c in hydra_cols]
304
+ vec_np = np.array(vec).reshape(1, -1)
305
+
306
+ try:
307
+ p_crash = hydra_models['crash'].predict_proba(vec_np)[0][1]
308
  if p_crash > max_hydra_crash:
309
  max_hydra_crash = p_crash
310
  if p_crash > 0.6 and hydra_crash_time == 0: hydra_crash_time = current_ts
311
+ except: pass
312
+
313
+ try:
314
+ p_give = hydra_models['giveback'].predict_proba(vec_np)[0][1]
315
+ if p_give > max_hydra_giveback: max_hydra_giveback = p_give
316
+ except: pass
317
+
318
+ # 🕸️ B. Legacy Check (Every Minute)
319
+ if self.proc.guardian_legacy:
320
+ # Update 5m index based on time (approximate but fast)
321
+ curr_5m_idx = idx_5m + (current_idx_1m - idx_1m) // 5
322
+ if curr_5m_idx >= len(numpy_frames['5m']): curr_5m_idx = len(numpy_frames['5m']) - 1
323
+
324
+ f_1m = numpy_frames['1m'][current_idx_1m-60+1 : current_idx_1m+1].tolist()
325
+ f_5m = numpy_frames['5m'][curr_5m_idx-30+1 : curr_5m_idx+1].tolist()
326
+
327
+ legacy_res = self.proc.guardian_legacy.analyze_position(
328
+ f_1m, f_5m, ohlcv_15m, entry_price, volume_30m_usd=1000000
329
+ )
330
+ scores = legacy_res.get('scores', {})
331
+ s_v2 = scores.get('v2', 0.0)
332
+ s_v3 = scores.get('v3', 0.0)
333
+
334
+ if s_v2 > max_legacy_v2:
335
+ max_legacy_v2 = s_v2
336
+ if s_v2 > 0.8 and legacy_panic_time == 0: legacy_panic_time = current_ts
337
 
338
+ if s_v3 > max_legacy_v3: max_legacy_v3 = s_v3
339
+
340
+ ts_aligned = int(current_time.timestamp() // 60) * 60 * 1000
341
+ ai_results.append({
342
+ 'timestamp': ts_aligned, 'symbol': sym, 'close': entry_price,
343
+ 'real_titan': 0.5,
344
+ 'signal_type': sig_type, 'l1_score': l1_score,
345
+ 'risk_hydra_crash': max_hydra_crash,
346
+ 'time_hydra_crash': hydra_crash_time,
347
+ 'risk_legacy_v2': max_legacy_v2,
348
+ 'time_legacy_panic': legacy_panic_time
349
+ })
 
 
 
 
 
 
 
 
 
 
 
 
 
350
 
351
  dt = time.time() - t0
352
  if ai_results:
353
  pd.DataFrame(ai_results).to_pickle(scores_file)
354
+ print(f" 💾 [{sym}] Done in {dt:.2f}s. Saved {len(ai_results)} signals.", flush=True)
355
  else:
356
  print(f" ⚠️ [{sym}] No signals.", flush=True)
357
 
358
+ del numpy_frames, time_indices, df_1m, candles, frames, fast_1m
359
  gc.collect()
360
 
361
  async def generate_truth_data(self):
 
365
  start_time_ms = int(dt_start.timestamp() * 1000)
366
  end_time_ms = int(dt_end.timestamp() * 1000)
367
  print(f"\n🚜 [Phase 1] Processing Era: {self.force_start_date} -> {self.force_end_date}")
368
+ else: return
 
369
 
370
  for sym in self.TARGET_COINS:
371
  try:
372
  candles = await self._fetch_all_data_fast(sym, start_time_ms, end_time_ms)
373
  if candles:
374
  await self._process_data_in_memory(sym, candles, start_time_ms, end_time_ms)
 
 
375
  except Exception as e:
376
  print(f" ❌ SKIP {sym}: {e}", flush=True)
 
377
  gc.collect()
378
 
379
  @staticmethod
 
396
  for config in combinations_batch:
397
  wallet = { "balance": initial_capital, "allocated": 0.0, "positions": {}, "trades_history": [] }
398
 
399
+ # Configs
400
  w_titan = config['w_titan']; w_struct = config['w_struct']; entry_thresh = config['thresh']
401
  hydra_thresh = config['hydra_thresh']
402
  legacy_thresh = config['legacy_thresh']
 
609
  print(f" 🛡️ Guard: Hydra={best['config']['hydra_thresh']} | Legacy={best['config']['legacy_thresh']}")
610
  print("="*60)
611
  return best['config'], best
612
+
613
  async def run_strategic_optimization_task():
614
  print("\n🧪 [STRATEGIC BACKTEST] Smart Adaptive Grid Initiated...")
615
  r2 = R2Service()