Riy777 committed on
Commit
f8af8be
·
verified ·
1 Parent(s): 879205b

Update backtest_engine.py

Browse files
Files changed (1) hide show
  1. backtest_engine.py +195 -113
backtest_engine.py CHANGED
@@ -1,5 +1,5 @@
1
  # ============================================================
2
- # 🧪 backtest_engine.py (V104.0 - GEM-Architect: High Fidelity & Speed Injection)
3
  # ============================================================
4
 
5
  import asyncio
@@ -21,6 +21,7 @@ try:
21
  from learning_hub.adaptive_hub import StrategyDNA, AdaptiveHub
22
  from r2 import R2Service
23
  import ccxt.async_support as ccxt
 
24
  except ImportError:
25
  pass
26
 
@@ -31,7 +32,6 @@ class HeavyDutyBacktester:
31
  def __init__(self, data_manager, processor):
32
  self.dm = data_manager
33
  self.proc = processor
34
- # كثافة الشبكة للدخول
35
  self.GRID_DENSITY = 6
36
  self.INITIAL_CAPITAL = 10.0
37
  self.TRADING_FEES = 0.001
@@ -53,7 +53,7 @@ class HeavyDutyBacktester:
53
  self.force_end_date = None
54
 
55
  if not os.path.exists(CACHE_DIR): os.makedirs(CACHE_DIR)
56
- print(f"🧪 [Backtest V104.0] High-Fidelity 1-Minute Scan (Speed Injected).")
57
 
58
  def set_date_range(self, start_str, end_str):
59
  self.force_start_date = start_str
@@ -104,7 +104,7 @@ class HeavyDutyBacktester:
104
  return unique_candles
105
 
106
  # ==============================================================
107
- # 🏎️ VECTORIZED INDICATORS (Optimized for Hydra Inputs)
108
  # ==============================================================
109
  def _calculate_indicators_vectorized(self, df):
110
  # 1. Basic Setup
@@ -121,6 +121,7 @@ class HeavyDutyBacktester:
121
  df['rsi'] = 100 - (100 / (1 + rs))
122
  df['ema20'] = df['close'].ewm(span=20, adjust=False).mean()
123
  df['ema50'] = df['close'].ewm(span=50, adjust=False).mean()
 
124
 
125
  # 3. ATR
126
  high_low = df['high'] - df['low']
@@ -130,20 +131,43 @@ class HeavyDutyBacktester:
130
  true_range = ranges.max(axis=1)
131
  df['atr'] = true_range.rolling(14).mean()
132
 
133
- # 4. 🔥 Hydra Specifics (Pre-calc for Speed)
134
  sma20 = df['close'].rolling(20).mean()
135
  std20 = df['close'].rolling(20).std()
136
  df['bb_width'] = ((sma20 + 2*std20) - (sma20 - 2*std20)) / sma20
137
-
138
  df['vol_ma20'] = df['volume'].rolling(window=20).mean()
139
  df['vol_ma50'] = df['volume'].rolling(window=50).mean()
140
  df['rel_vol'] = df['volume'] / (df['vol_ma50'] + 1e-9)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
141
 
142
  df.fillna(0, inplace=True)
143
  return df
144
 
145
  # ==============================================================
146
- # 🧠 CPU PROCESSING (1-Minute Scan)
147
  # ==============================================================
148
  async def _process_data_in_memory(self, sym, candles, start_ms, end_ms):
149
  safe_sym = sym.replace('/', '_')
@@ -154,7 +178,7 @@ class HeavyDutyBacktester:
154
  print(f" 📂 [{sym}] Data Exists -> Skipping.")
155
  return
156
 
157
- print(f" ⚙️ [CPU] Analyzing {sym} (1-Min Fidelity)...", flush=True)
158
  t0 = time.time()
159
 
160
  df_1m = pd.DataFrame(candles, columns=['timestamp', 'open', 'high', 'low', 'close', 'volume'])
@@ -162,12 +186,10 @@ class HeavyDutyBacktester:
162
  df_1m.set_index('datetime', inplace=True)
163
  df_1m = df_1m.sort_index()
164
 
 
165
  frames = {}
166
- numpy_frames = {}
167
- time_indices = {}
168
  agg_dict = {'open': 'first', 'high': 'max', 'low': 'min', 'close': 'last', 'volume': 'sum'}
169
 
170
- # --- 1. Pre-Calculate EVERYTHING (Vectorized) ---
171
  frames['1m'] = df_1m.copy()
172
  frames['1m']['timestamp'] = frames['1m'].index.floor('1min').astype(np.int64) // 10**6
173
  frames['1m'] = self._calculate_indicators_vectorized(frames['1m'])
@@ -179,25 +201,44 @@ class HeavyDutyBacktester:
179
  'atr': frames['1m']['atr'].values,
180
  'bb_width': frames['1m']['bb_width'].values,
181
  'rel_vol': frames['1m']['rel_vol'].values,
182
- 'timestamp': frames['1m']['timestamp'].values
 
 
 
 
 
 
 
 
 
 
183
  }
184
 
185
- col_order = ['timestamp', 'open', 'high', 'low', 'close', 'volume']
186
- numpy_frames['1m'] = frames['1m'][col_order].values
187
- time_indices['1m'] = frames['1m'].index
188
-
189
  for tf_str, tf_code in [('5m', '5T'), ('15m', '15T'), ('1h', '1h'), ('4h', '4h'), ('1d', '1D')]:
190
  resampled = df_1m.resample(tf_code).agg(agg_dict).dropna()
191
- if tf_str in ['15m', '1h']:
192
  resampled = self._calculate_indicators_vectorized(resampled)
193
  resampled['timestamp'] = resampled.index.astype(np.int64) // 10**6
194
  frames[tf_str] = resampled
195
- numpy_frames[tf_str] = resampled[col_order].values
196
- time_indices[tf_str] = resampled.index
197
-
198
- ai_results = []
199
-
200
- # --- L1 Logic (Filter) ---
 
 
 
 
 
 
 
 
 
 
 
201
  df_5m_aligned = frames['5m'].copy()
202
  df_1h_aligned = frames['1h'].reindex(frames['5m'].index, method='ffill')
203
  df_15m_aligned = frames['15m'].reindex(frames['5m'].index, method='ffill')
@@ -206,142 +247,185 @@ class HeavyDutyBacktester:
206
  cond_not_pump = change_4h <= 8.0
207
  cond_rsi_1h_safe = df_1h_aligned['rsi'] <= 70
208
  deviation = (df_1h_aligned['close'] - df_1h_aligned['ema20']) / df_1h_aligned['atr']
209
- cond_deviation_safe = deviation <= 1.8
210
- filters_pass = cond_not_pump & cond_rsi_1h_safe & cond_deviation_safe
211
 
212
- bullish_1h = (df_1h_aligned['ema20'] > df_1h_aligned['ema50']) | (df_1h_aligned['close'] > df_1h_aligned['ema20'])
213
- rsi_1h_ok = (df_1h_aligned['rsi'] >= 45) & (df_1h_aligned['rsi'] <= 68)
214
- close_above_ema_15m = df_15m_aligned['close'] >= df_15m_aligned['ema20']
215
- vol_spike_15m = df_15m_aligned['volume'] >= (1.5 * df_15m_aligned['vol_ma20'])
216
- is_breakout = filters_pass & bullish_1h & rsi_1h_ok & close_above_ema_15m & vol_spike_15m
217
-
218
- rsi_oversold = (df_1h_aligned['rsi'] >= 20) & (df_1h_aligned['rsi'] <= 40)
219
- price_drop = change_4h <= -2.0
220
- is_green = df_15m_aligned['close'] > df_15m_aligned['open']
221
- is_reversal = filters_pass & rsi_oversold & price_drop & is_green
222
 
223
  valid_indices = df_5m_aligned[is_breakout | is_reversal].index
224
  start_dt = df_1m.index[0] + pd.Timedelta(minutes=500)
225
  final_valid_indices = [t for t in valid_indices if t >= start_dt]
226
 
227
- print(f" 🎯 Found {len(final_valid_indices)} signals. Running 1-Min Simulations...", flush=True)
228
 
229
- # --- Hydra Model Access ---
 
230
  hydra_models = {}
231
  hydra_cols = []
232
  if self.proc.guardian_hydra and self.proc.guardian_hydra.initialized:
233
  hydra_models = self.proc.guardian_hydra.models
234
  hydra_cols = self.proc.guardian_hydra.feature_cols
235
-
236
- # --- 3. The Main Loop (Every Minute Check) ---
237
- for i, current_time in enumerate(final_valid_indices):
238
- idx_1m = time_indices['1m'].searchsorted(current_time, side='right') - 1
239
- idx_5m = time_indices['5m'].searchsorted(current_time, side='right') - 1
240
- idx_15m = time_indices['15m'].searchsorted(current_time, side='right') - 1
241
- idx_1h = time_indices['1h'].searchsorted(current_time, side='right') - 1
242
- idx_4h = time_indices['4h'].searchsorted(current_time, side='right') - 1
243
- idx_1d = time_indices['1d'].searchsorted(current_time, side='right') - 1
244
 
245
- if idx_1m < 500 or idx_4h < 100: continue
 
 
 
 
 
 
 
 
 
 
 
246
 
247
- ohlcv_1h = numpy_frames['1h'][idx_1h-60+1 : idx_1h+1].tolist()
248
- ohlcv_15m = numpy_frames['15m'][idx_15m-60+1 : idx_15m+1].tolist()
 
 
249
 
250
- # Logic Classification (Simplified)
251
- sig_type = 'BREAKOUT' if is_breakout[current_time] else 'REVERSAL'
252
- l1_score = 100.0 if sig_type == 'REVERSAL' else 20.0
253
-
254
- # 🔥 RISK PROFILING (Every 1 Minute)
255
- max_hydra_crash = 0.0
256
- max_hydra_giveback = 0.0
257
- max_legacy_v2 = 0.0
258
- max_legacy_v3 = 0.0
259
-
260
- hydra_crash_time = 0
261
- legacy_panic_time = 0
262
 
263
  entry_price = fast_1m['close'][idx_1m]
264
  highest_price = entry_price
265
 
266
- # Scan next 4 hours (240 minutes), STEP = 1 minute (Live System Fidelity)
267
- future_limit = 240
268
- end_idx_1m = min(idx_1m + future_limit, len(fast_1m['close']) - 1)
269
 
270
- # Pre-lookup HTF RSIs to avoid heavy lookups every minute
271
- rsi_5m_val = frames['5m']['rsi'].asof(current_time)
272
- rsi_15m_val = frames['15m']['rsi'].asof(current_time)
273
 
274
  # Loop minute by minute
275
- for current_idx_1m in range(idx_1m + 1, end_idx_1m + 1):
276
- curr_price = fast_1m['close'][current_idx_1m]
277
  if curr_price > highest_price: highest_price = curr_price
278
- current_ts = int(fast_1m['timestamp'][current_idx_1m])
279
 
280
- # 🐉 A. Hydra Injection (Every Minute)
281
  if hydra_models:
282
- atr_val = fast_1m['atr'][current_idx_1m]
283
  sl_dist = 1.5 * atr_val if atr_val > 0 else entry_price * 0.015
284
-
285
  pnl_r = (curr_price - entry_price) / sl_dist if sl_dist > 0 else 0
286
  max_pnl_r = (highest_price - entry_price) / sl_dist if sl_dist > 0 else 0
287
 
 
 
 
 
 
 
 
 
288
  row_dict = {
289
- 'rsi_1m': fast_1m['rsi'][current_idx_1m],
290
- 'rsi_5m': rsi_5m_val,
291
- 'rsi_15m': rsi_15m_val,
292
- 'bb_width': fast_1m['bb_width'][current_idx_1m],
293
- 'rel_vol': fast_1m['rel_vol'][current_idx_1m],
294
  'dist_ema20_1h': 0.0,
295
  'atr_pct': atr_val / curr_price if curr_price > 0 else 0,
296
- 'norm_pnl_r': pnl_r,
297
- 'max_pnl_r': max_pnl_r,
298
- 'dist_tp_atr': 0.0, 'dist_sl_atr': 0.0,
299
- 'time_in_trade': (current_idx_1m - idx_1m),
300
  'entry_type': 0.0, 'oracle_conf': 0.8, 'l2_score': 0.7, 'target_class': 3.0
301
  }
302
-
303
- vec = [row_dict.get(c, 0.0) for c in hydra_cols]
304
- vec_np = np.array(vec).reshape(1, -1)
305
 
306
  try:
307
- p_crash = hydra_models['crash'].predict_proba(vec_np)[0][1]
308
  if p_crash > max_hydra_crash:
309
  max_hydra_crash = p_crash
310
- if p_crash > 0.6 and hydra_crash_time == 0: hydra_crash_time = current_ts
311
  except: pass
312
-
313
  try:
314
- p_give = hydra_models['giveback'].predict_proba(vec_np)[0][1]
315
  if p_give > max_hydra_giveback: max_hydra_giveback = p_give
316
  except: pass
317
 
318
- # 🕸️ B. Legacy Check (Every Minute)
319
- if self.proc.guardian_legacy:
320
- # Update 5m index based on time (approximate but fast)
321
- curr_5m_idx = idx_5m + (current_idx_1m - idx_1m) // 5
322
- if curr_5m_idx >= len(numpy_frames['5m']): curr_5m_idx = len(numpy_frames['5m']) - 1
323
-
324
- f_1m = numpy_frames['1m'][current_idx_1m-60+1 : current_idx_1m+1].tolist()
325
- f_5m = numpy_frames['5m'][curr_5m_idx-30+1 : curr_5m_idx+1].tolist()
326
-
327
- legacy_res = self.proc.guardian_legacy.analyze_position(
328
- f_1m, f_5m, ohlcv_15m, entry_price, volume_30m_usd=1000000
329
- )
330
- scores = legacy_res.get('scores', {})
331
- s_v2 = scores.get('v2', 0.0)
332
- s_v3 = scores.get('v3', 0.0)
333
-
334
- if s_v2 > max_legacy_v2:
335
- max_legacy_v2 = s_v2
336
- if s_v2 > 0.8 and legacy_panic_time == 0: legacy_panic_time = current_ts
337
 
338
- if s_v3 > max_legacy_v3: max_legacy_v3 = s_v3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
339
 
340
  ts_aligned = int(current_time.timestamp() // 60) * 60 * 1000
 
 
 
 
341
  ai_results.append({
342
  'timestamp': ts_aligned, 'symbol': sym, 'close': entry_price,
343
- 'real_titan': 0.5,
344
- 'signal_type': sig_type, 'l1_score': l1_score,
345
  'risk_hydra_crash': max_hydra_crash,
346
  'time_hydra_crash': hydra_crash_time,
347
  'risk_legacy_v2': max_legacy_v2,
@@ -619,7 +703,6 @@ async def run_strategic_optimization_task():
619
  await dm.initialize()
620
  await proc.initialize()
621
 
622
- # ✅ Activate Silent Mode for Hydra during Backtest
623
  if proc.guardian_hydra:
624
  proc.guardian_hydra.set_silent_mode(True)
625
  print(" 🔇 [Hydra] Silent Mode: ACTIVATED for Backtest.")
@@ -640,7 +723,6 @@ async def run_strategic_optimization_task():
640
  best_config, best_stats = await optimizer.run_optimization(target_regime=target)
641
  if best_config and best_stats:
642
  hub.submit_challenger(target, best_config, best_stats)
643
-
644
  await hub._save_state_to_r2()
645
  hub._inject_current_parameters()
646
  print(f"✅ [System] ALL DNA Updated & Saved Successfully.")
 
1
  # ============================================================
2
+ # 🧪 backtest_engine.py (V105.0 - GEM-Architect: ULTIMATE SPEED)
3
  # ============================================================
4
 
5
  import asyncio
 
21
  from learning_hub.adaptive_hub import StrategyDNA, AdaptiveHub
22
  from r2 import R2Service
23
  import ccxt.async_support as ccxt
24
+ import xgboost as xgb # Required for Direct Injection
25
  except ImportError:
26
  pass
27
 
 
32
  def __init__(self, data_manager, processor):
33
  self.dm = data_manager
34
  self.proc = processor
 
35
  self.GRID_DENSITY = 6
36
  self.INITIAL_CAPITAL = 10.0
37
  self.TRADING_FEES = 0.001
 
53
  self.force_end_date = None
54
 
55
  if not os.path.exists(CACHE_DIR): os.makedirs(CACHE_DIR)
56
+ print(f"🧪 [Backtest V105.0] Ultimate Speed (Hydra + Legacy V2/V3 Injection).")
57
 
58
  def set_date_range(self, start_str, end_str):
59
  self.force_start_date = start_str
 
104
  return unique_candles
105
 
106
  # ==============================================================
107
+ # 🏎️ VECTORIZED INDICATORS (ALL-IN-ONE)
108
  # ==============================================================
109
  def _calculate_indicators_vectorized(self, df):
110
  # 1. Basic Setup
 
121
  df['rsi'] = 100 - (100 / (1 + rs))
122
  df['ema20'] = df['close'].ewm(span=20, adjust=False).mean()
123
  df['ema50'] = df['close'].ewm(span=50, adjust=False).mean()
124
+ df['ema200'] = df['close'].ewm(span=200, adjust=False).mean() # For V3
125
 
126
  # 3. ATR
127
  high_low = df['high'] - df['low']
 
131
  true_range = ranges.max(axis=1)
132
  df['atr'] = true_range.rolling(14).mean()
133
 
134
+ # 4. 🔥 Hydra Specifics
135
  sma20 = df['close'].rolling(20).mean()
136
  std20 = df['close'].rolling(20).std()
137
  df['bb_width'] = ((sma20 + 2*std20) - (sma20 - 2*std20)) / sma20
 
138
  df['vol_ma20'] = df['volume'].rolling(window=20).mean()
139
  df['vol_ma50'] = df['volume'].rolling(window=50).mean()
140
  df['rel_vol'] = df['volume'] / (df['vol_ma50'] + 1e-9)
141
+
142
+ # 5. 🕸️ Legacy V2/V3 Specifics (Pre-calc)
143
+ # Log Returns
144
+ df['log_ret'] = np.log(df['close'] / df['close'].shift(1))
145
+
146
+ # Fib Position (Rolling Min/Max 50)
147
+ roll_max = df['high'].rolling(50).max()
148
+ roll_min = df['low'].rolling(50).min()
149
+ diff = (roll_max - roll_min).replace(0, 1e-9)
150
+ df['fib_pos'] = (df['close'] - roll_min) / diff
151
+
152
+ # Trend Slope (EMA change)
153
+ df['trend_slope'] = (df['ema20'] - df['ema20'].shift(5)) / df['ema20'].shift(5)
154
+
155
+ # Volatility (ATR/Close)
156
+ df['volatility'] = df['atr'] / df['close']
157
+
158
+ # Fib 618 Distance (For V3/Legacy)
159
+ fib618 = roll_max - (diff * 0.382)
160
+ df['dist_fib618'] = (df['close'] - fib618) / df['close']
161
+
162
+ # EMA Distances (For V3)
163
+ df['dist_ema50'] = (df['close'] - df['ema50']) / df['close']
164
+ df['dist_ema200'] = (df['close'] - df['ema200']) / df['close']
165
 
166
  df.fillna(0, inplace=True)
167
  return df
168
 
169
  # ==============================================================
170
+ # 🧠 CPU PROCESSING (Full Injection Mode)
171
  # ==============================================================
172
  async def _process_data_in_memory(self, sym, candles, start_ms, end_ms):
173
  safe_sym = sym.replace('/', '_')
 
178
  print(f" 📂 [{sym}] Data Exists -> Skipping.")
179
  return
180
 
181
+ print(f" ⚙️ [CPU] Analyzing {sym} (Full Speed Injection)...", flush=True)
182
  t0 = time.time()
183
 
184
  df_1m = pd.DataFrame(candles, columns=['timestamp', 'open', 'high', 'low', 'close', 'volume'])
 
186
  df_1m.set_index('datetime', inplace=True)
187
  df_1m = df_1m.sort_index()
188
 
189
+ # --- 1. Pre-Calculate EVERYTHING (Vectorized) ---
190
  frames = {}
 
 
191
  agg_dict = {'open': 'first', 'high': 'max', 'low': 'min', 'close': 'last', 'volume': 'sum'}
192
 
 
193
  frames['1m'] = df_1m.copy()
194
  frames['1m']['timestamp'] = frames['1m'].index.floor('1min').astype(np.int64) // 10**6
195
  frames['1m'] = self._calculate_indicators_vectorized(frames['1m'])
 
201
  'atr': frames['1m']['atr'].values,
202
  'bb_width': frames['1m']['bb_width'].values,
203
  'rel_vol': frames['1m']['rel_vol'].values,
204
+ 'timestamp': frames['1m']['timestamp'].values,
205
+ # Legacy Feats
206
+ 'log_ret': frames['1m']['log_ret'].values,
207
+ 'fib_pos': frames['1m']['fib_pos'].values,
208
+ 'volatility': frames['1m']['volatility'].values,
209
+ 'trend_slope': frames['1m']['trend_slope'].values,
210
+ 'dist_fib618': frames['1m']['dist_fib618'].values,
211
+ 'ema50': frames['1m']['ema50'].values,
212
+ 'ema200': frames['1m']['ema200'].values,
213
+ 'dist_ema50': frames['1m']['dist_ema50'].values,
214
+ 'dist_ema200': frames['1m']['dist_ema200'].values,
215
  }
216
 
217
+ # HTF
218
+ numpy_htf = {}
 
 
219
  for tf_str, tf_code in [('5m', '5T'), ('15m', '15T'), ('1h', '1h'), ('4h', '4h'), ('1d', '1D')]:
220
  resampled = df_1m.resample(tf_code).agg(agg_dict).dropna()
221
+ if tf_str in ['5m', '15m', '1h']:
222
  resampled = self._calculate_indicators_vectorized(resampled)
223
  resampled['timestamp'] = resampled.index.astype(np.int64) // 10**6
224
  frames[tf_str] = resampled
225
+
226
+ # Store Numpy for HTF lookups
227
+ numpy_htf[tf_str] = {
228
+ 'close': resampled['close'].values,
229
+ 'rsi': resampled['rsi'].values,
230
+ 'log_ret': resampled['log_ret'].values,
231
+ 'fib_pos': resampled['fib_pos'].values,
232
+ 'trend_slope': resampled['trend_slope'].values,
233
+ 'dist_fib618': resampled['dist_fib618'].values,
234
+ 'ema50': resampled['ema50'].values,
235
+ 'ema200': resampled['ema200'].values,
236
+ 'dist_ema50': resampled['dist_ema50'].values,
237
+ 'dist_ema200': resampled['dist_ema200'].values,
238
+ 'timestamp': resampled['timestamp'].values
239
+ }
240
+
241
+ # --- L1 Filter ---
242
  df_5m_aligned = frames['5m'].copy()
243
  df_1h_aligned = frames['1h'].reindex(frames['5m'].index, method='ffill')
244
  df_15m_aligned = frames['15m'].reindex(frames['5m'].index, method='ffill')
 
247
  cond_not_pump = change_4h <= 8.0
248
  cond_rsi_1h_safe = df_1h_aligned['rsi'] <= 70
249
  deviation = (df_1h_aligned['close'] - df_1h_aligned['ema20']) / df_1h_aligned['atr']
250
+ filters_pass = cond_not_pump & cond_rsi_1h_safe & (deviation <= 1.8)
 
251
 
252
+ is_breakout = filters_pass & ((df_1h_aligned['ema20'] > df_1h_aligned['ema50']) | (df_1h_aligned['close'] > df_1h_aligned['ema20']))
253
+ is_reversal = filters_pass & (df_1h_aligned['rsi'].between(20, 40)) & (change_4h <= -2.0)
 
 
 
 
 
 
 
 
254
 
255
  valid_indices = df_5m_aligned[is_breakout | is_reversal].index
256
  start_dt = df_1m.index[0] + pd.Timedelta(minutes=500)
257
  final_valid_indices = [t for t in valid_indices if t >= start_dt]
258
 
259
+ print(f" 🎯 Found {len(final_valid_indices)} signals. Running High-Fidelity Sim...", flush=True)
260
 
261
+ # --- Prepare Models ---
262
+ # 1. Hydra
263
  hydra_models = {}
264
  hydra_cols = []
265
  if self.proc.guardian_hydra and self.proc.guardian_hydra.initialized:
266
  hydra_models = self.proc.guardian_hydra.models
267
  hydra_cols = self.proc.guardian_hydra.feature_cols
 
 
 
 
 
 
 
 
 
268
 
269
+ # 2. Legacy V2/V3 (Direct Access)
270
+ legacy_v2_model = None
271
+ legacy_v3_model = None
272
+ v3_feat_names = []
273
+ if self.proc.guardian_legacy and self.proc.guardian_legacy.initialized:
274
+ legacy_v2_model = self.proc.guardian_legacy.model_v2
275
+ legacy_v3_model = self.proc.guardian_legacy.model_v3
276
+ v3_feat_names = self.proc.guardian_legacy.v3_feature_names
277
+
278
+ # --- 3. The Main Loop ---
279
+ for i, current_time in enumerate(final_valid_indices):
280
+ ts_val = int(current_time.timestamp() * 1000)
281
 
282
+ # Binary Search Indices
283
+ idx_1m = np.searchsorted(fast_1m['timestamp'], ts_val)
284
+ idx_5m = np.searchsorted(numpy_htf['5m']['timestamp'], ts_val)
285
+ idx_15m = np.searchsorted(numpy_htf['15m']['timestamp'], ts_val)
286
 
287
+ if idx_1m < 500 or idx_1m >= len(fast_1m['close']) - 240: continue
 
 
 
 
 
 
 
 
 
 
 
288
 
289
  entry_price = fast_1m['close'][idx_1m]
290
  highest_price = entry_price
291
 
292
+ max_hydra_crash = 0.0; max_hydra_giveback = 0.0; hydra_crash_time = 0
293
+ max_legacy_v2 = 0.0; max_legacy_v3 = 0.0; legacy_panic_time = 0
 
294
 
295
+ end_idx_1m = min(idx_1m + 240, len(fast_1m['close']) - 1)
 
 
296
 
297
  # Loop minute by minute
298
+ for c_idx in range(idx_1m + 1, end_idx_1m + 1):
299
+ curr_price = fast_1m['close'][c_idx]
300
  if curr_price > highest_price: highest_price = curr_price
301
+ curr_ts = int(fast_1m['timestamp'][c_idx])
302
 
303
+ # --- A. HYDRA INJECTION ---
304
  if hydra_models:
305
+ atr_val = fast_1m['atr'][c_idx]
306
  sl_dist = 1.5 * atr_val if atr_val > 0 else entry_price * 0.015
 
307
  pnl_r = (curr_price - entry_price) / sl_dist if sl_dist > 0 else 0
308
  max_pnl_r = (highest_price - entry_price) / sl_dist if sl_dist > 0 else 0
309
 
310
+ # HTF Lookups (Constant or Nearest)
311
+ # For speed, we use values at entry or nearest aligned.
312
+ # Simple scaling: c_idx maps to c_idx // 5 for 5m approx
313
+ c_5m = idx_5m + (c_idx - idx_1m) // 5
314
+ c_15m = idx_15m + (c_idx - idx_1m) // 15
315
+ if c_5m >= len(numpy_htf['5m']['rsi']): c_5m = len(numpy_htf['5m']['rsi']) - 1
316
+ if c_15m >= len(numpy_htf['15m']['rsi']): c_15m = len(numpy_htf['15m']['rsi']) - 1
317
+
318
  row_dict = {
319
+ 'rsi_1m': fast_1m['rsi'][c_idx],
320
+ 'rsi_5m': numpy_htf['5m']['rsi'][c_5m],
321
+ 'rsi_15m': numpy_htf['15m']['rsi'][c_15m],
322
+ 'bb_width': fast_1m['bb_width'][c_idx],
323
+ 'rel_vol': fast_1m['rel_vol'][c_idx],
324
  'dist_ema20_1h': 0.0,
325
  'atr_pct': atr_val / curr_price if curr_price > 0 else 0,
326
+ 'norm_pnl_r': pnl_r, 'max_pnl_r': max_pnl_r,
327
+ 'time_in_trade': (c_idx - idx_1m),
 
 
328
  'entry_type': 0.0, 'oracle_conf': 0.8, 'l2_score': 0.7, 'target_class': 3.0
329
  }
330
+ vec = np.array([row_dict.get(c, 0.0) for c in hydra_cols]).reshape(1, -1)
 
 
331
 
332
  try:
333
+ p_crash = hydra_models['crash'].predict_proba(vec)[0][1]
334
  if p_crash > max_hydra_crash:
335
  max_hydra_crash = p_crash
336
+ if p_crash > 0.6 and hydra_crash_time == 0: hydra_crash_time = curr_ts
337
  except: pass
 
338
  try:
339
+ p_give = hydra_models['giveback'].predict_proba(vec)[0][1]
340
  if p_give > max_hydra_giveback: max_hydra_giveback = p_give
341
  except: pass
342
 
343
+ # --- B. LEGACY INJECTION (V2/V3) ---
344
+ if legacy_v2_model or legacy_v3_model:
345
+ # Update HTF indices
346
+ c_5m = idx_5m + (c_idx - idx_1m) // 5
347
+ c_15m = idx_15m + (c_idx - idx_1m) // 15
348
+ if c_5m >= len(numpy_htf['5m']['close']): c_5m = len(numpy_htf['5m']['close']) - 1
349
+ if c_15m >= len(numpy_htf['15m']['close']): c_15m = len(numpy_htf['15m']['close']) - 1
350
+
351
+ # V2 Construction
352
+ if legacy_v2_model:
353
+ # V2 needs: [1m_feats, 5m_feats, 15m_feats, LAGS...]
354
+ # 1m Feats: log_ret, rsi, fib_pos, volatility
355
+ f1 = [fast_1m['log_ret'][c_idx], fast_1m['rsi'][c_idx]/100.0, fast_1m['fib_pos'][c_idx], fast_1m['volatility'][c_idx]]
356
+ # 5m Feats: log_ret, rsi, fib_pos, trend_slope
357
+ f5 = [numpy_htf['5m']['log_ret'][c_5m], numpy_htf['5m']['rsi'][c_5m]/100.0, numpy_htf['5m']['fib_pos'][c_5m], numpy_htf['5m']['trend_slope'][c_5m]]
358
+ # 15m Feats: log_ret, rsi, dist_fib618, trend_slope
359
+ f15 = [numpy_htf['15m']['log_ret'][c_15m], numpy_htf['15m']['rsi'][c_15m]/100.0, numpy_htf['15m']['dist_fib618'][c_15m], numpy_htf['15m']['trend_slope'][c_15m]]
 
 
360
 
361
+ vec_v2 = f1 + f5 + f15
362
+
363
+ # Add Lags (1, 2, 3, 5, 10, 20)
364
+ lags = [1, 2, 3, 5, 10, 20]
365
+ for lag in lags:
366
+ l_idx = c_idx - lag
367
+ if l_idx >= 0:
368
+ lag_row = [fast_1m['log_ret'][l_idx], fast_1m['rsi'][l_idx]/100.0, fast_1m['fib_pos'][l_idx], fast_1m['volatility'][l_idx]]
369
+ vec_v2.extend(lag_row)
370
+ else:
371
+ vec_v2.extend([0.0, 0.5, 0.5, 0.0])
372
+
373
+ try:
374
+ # XGB Predict
375
+ dm = xgb.DMatrix(np.array(vec_v2).reshape(1, -1))
376
+ pred = legacy_v2_model.predict(dm)
377
+ p_v2 = float(pred[0][2]) if len(pred.shape)>1 else float(pred[0])
378
+
379
+ if p_v2 > max_legacy_v2:
380
+ max_legacy_v2 = p_v2
381
+ if p_v2 > 0.8 and legacy_panic_time == 0: legacy_panic_time = curr_ts
382
+ except: pass
383
+
384
+ # V3 Construction (DataFrame)
385
+ if legacy_v3_model and v3_feat_names:
386
+ # V3 uses a DataFrame with specific column names
387
+ # We reconstruct the dict
388
+ # Feats: rsi, dist_ema50, dist_ema200, log_ret (for 1m, 5m, 15m)
389
+ v3_dict = {}
390
+ # 1m
391
+ v3_dict['rsi'] = fast_1m['rsi'][c_idx]
392
+ v3_dict['dist_ema50'] = fast_1m['dist_ema50'][c_idx]
393
+ v3_dict['dist_ema200'] = fast_1m['dist_ema200'][c_idx]
394
+ v3_dict['log_ret'] = fast_1m['log_ret'][c_idx]
395
+ # 5m
396
+ v3_dict['rsi_5m'] = numpy_htf['5m']['rsi'][c_5m]
397
+ v3_dict['dist_ema50_5m'] = numpy_htf['5m']['dist_ema50'][c_5m]
398
+ v3_dict['dist_ema200_5m'] = numpy_htf['5m']['dist_ema200'][c_5m]
399
+ v3_dict['log_ret_5m'] = numpy_htf['5m']['log_ret'][c_5m]
400
+ # 15m
401
+ v3_dict['rsi_15m'] = numpy_htf['15m']['rsi'][c_15m]
402
+ v3_dict['dist_ema50_15m'] = numpy_htf['15m']['dist_ema50'][c_15m]
403
+ v3_dict['dist_ema200_15m'] = numpy_htf['15m']['dist_ema200'][c_15m]
404
+ v3_dict['log_ret_15m'] = numpy_htf['15m']['log_ret'][c_15m]
405
+
406
+ # Build ordered DF
407
+ try:
408
+ df_v3 = pd.DataFrame(columns=v3_feat_names)
409
+ # Fill efficiently
410
+ vals = [v3_dict.get(n, 0.0) for n in v3_feat_names]
411
+ df_v3.loc[0] = vals
412
+ df_v3 = df_v3.astype(float)
413
+
414
+ dm_v3 = xgb.DMatrix(df_v3)
415
+ pred_v3 = legacy_v3_model.predict(dm_v3)
416
+ p_v3 = float(pred_v3[0])
417
+
418
+ if p_v3 > max_legacy_v3: max_legacy_v3 = p_v3
419
+ except: pass
420
 
421
  ts_aligned = int(current_time.timestamp() // 60) * 60 * 1000
422
+ # Logic Classification
423
+ sig_type = 'BREAKOUT' if is_breakout[current_time] else 'REVERSAL'
424
+ l1_score = 100.0 if sig_type == 'REVERSAL' else 20.0
425
+
426
  ai_results.append({
427
  'timestamp': ts_aligned, 'symbol': sym, 'close': entry_price,
428
+ 'real_titan': 0.5, 'signal_type': sig_type, 'l1_score': l1_score,
 
429
  'risk_hydra_crash': max_hydra_crash,
430
  'time_hydra_crash': hydra_crash_time,
431
  'risk_legacy_v2': max_legacy_v2,
 
703
  await dm.initialize()
704
  await proc.initialize()
705
 
 
706
  if proc.guardian_hydra:
707
  proc.guardian_hydra.set_silent_mode(True)
708
  print(" 🔇 [Hydra] Silent Mode: ACTIVATED for Backtest.")
 
723
  best_config, best_stats = await optimizer.run_optimization(target_regime=target)
724
  if best_config and best_stats:
725
  hub.submit_challenger(target, best_config, best_stats)
 
726
  await hub._save_state_to_r2()
727
  hub._inject_current_parameters()
728
  print(f"✅ [System] ALL DNA Updated & Saved Successfully.")