Riy777 committed on
Commit
3be23d3
·
verified ·
1 Parent(s): 1e3c170

Update backtest_engine.py

Browse files
Files changed (1) hide show
  1. backtest_engine.py +146 -135
backtest_engine.py CHANGED
@@ -1,5 +1,5 @@
1
  # ============================================================
2
- # 🧪 backtest_engine.py (V95.0 - GEM-Architect: Vectorized Sonar)
3
  # ============================================================
4
 
5
  import asyncio
@@ -36,7 +36,7 @@ class HeavyDutyBacktester:
36
  self.TRADING_FEES = 0.001
37
  self.MAX_SLOTS = 4
38
 
39
- # القائمة الكاملة
40
  self.TARGET_COINS = [
41
  'SOL/USDT', 'XRP/USDT', 'DOGE/USDT', 'ADA/USDT', 'AVAX/USDT', 'LINK/USDT',
42
  'TON/USDT', 'INJ/USDT', 'APT/USDT', 'OP/USDT', 'ARB/USDT', 'SUI/USDT',
@@ -53,7 +53,7 @@ class HeavyDutyBacktester:
53
  self.force_end_date = None
54
 
55
  if not os.path.exists(CACHE_DIR): os.makedirs(CACHE_DIR)
56
- print(f"🧪 [Backtest V95.0] Vectorized Sonar (Skipping Noise).")
57
 
58
  def set_date_range(self, start_str, end_str):
59
  self.force_start_date = start_str
@@ -63,32 +63,6 @@ class HeavyDutyBacktester:
63
  if df.empty: return []
64
  return df[['timestamp', 'open', 'high', 'low', 'close', 'volume']].values.tolist()
65
 
66
- # ==============================================================
67
- # 🚀 VECTORIZED INDICATORS (The Sonar)
68
- # ==============================================================
69
- def _calculate_sonar_indicators(self, df):
70
- """
71
- حساب مؤشرات سريعة جداً (Vectorized) لكامل البيانات دفعة واحدة.
72
- الهدف: تحديد المناطق "الميتة" لتجاهلها.
73
- """
74
- # 1. RSI Calculation (Manual Numpy for Speed)
75
- delta = df['close'].diff()
76
- gain = (delta.where(delta > 0, 0)).rolling(window=14).mean()
77
- loss = (-delta.where(delta < 0, 0)).rolling(window=14).mean()
78
- rs = gain / loss
79
- df['sonar_rsi'] = 100 - (100 / (1 + rs))
80
-
81
- # 2. Volume MA
82
- df['sonar_vol_ma'] = df['volume'].rolling(window=20).mean()
83
-
84
- # 3. Bollinger Band Width (Volatility check)
85
- # sma20 = df['close'].rolling(window=20).mean()
86
- # std = df['close'].rolling(window=20).std()
87
- # df['sonar_bb_width'] = (std * 2) / sma20 # عرض القناة كنسبة
88
-
89
- df.fillna(0, inplace=True)
90
- return df
91
-
92
  # ==============================================================
93
  # ⚡ FAST DATA DOWNLOADER
94
  # ==============================================================
@@ -130,11 +104,44 @@ class HeavyDutyBacktester:
130
  unique_candles.append(c)
131
  seen.add(c[0])
132
  unique_candles.sort(key=lambda x: x[0])
133
- print(f" ✅ Downloaded {len(unique_candles)} candles for {sym}.", flush=True)
134
  return unique_candles
135
 
136
  # ==============================================================
137
- # 🧠 CPU PROCESSING (SONAR + NUMPY)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
138
  # ==============================================================
139
  async def _process_data_in_memory(self, sym, candles, start_ms, end_ms):
140
  safe_sym = sym.replace('/', '_')
@@ -145,7 +152,7 @@ class HeavyDutyBacktester:
145
  print(f" 📂 [{sym}] Data Exists -> Skipping.")
146
  return
147
 
148
- print(f" ⚙️ [CPU] Processing {sym}...", flush=True)
149
  t0 = time.time()
150
 
151
  # 1. Prepare Pandas
@@ -156,13 +163,13 @@ class HeavyDutyBacktester:
156
  df_1m.set_index('datetime', inplace=True)
157
  df_1m = df_1m.sort_index()
158
 
159
- # 2. Resample (Base Frames)
160
  frames = {}
161
  numpy_frames = {}
162
  time_indices = {}
163
  agg_dict = {'open': 'first', 'high': 'max', 'low': 'min', 'close': 'last', 'volume': 'sum'}
164
 
165
- # 1m
166
  frames['1m'] = df_1m.copy()
167
  frames['1m']['timestamp'] = frames['1m'].index.floor('1min').astype(np.int64) // 10**6
168
  col_order = ['timestamp', 'open', 'high', 'low', 'close', 'volume']
@@ -173,89 +180,119 @@ class HeavyDutyBacktester:
173
  for tf_str, tf_code in [('5m', '5T'), ('15m', '15T'), ('1h', '1h'), ('4h', '4h'), ('1d', '1D')]:
174
  resampled = df_1m.resample(tf_code).agg(agg_dict).dropna()
175
 
176
- # --- 🚀 SONAR INJECTION ---
177
- # نحسب المؤشرات على فريم الـ 15m و 1h (الفريمات المستخدمة في L1)
178
  if tf_str in ['15m', '1h']:
179
- resampled = self._calculate_sonar_indicators(resampled)
180
- # --------------------------
181
-
182
  resampled['timestamp'] = resampled.index.astype(np.int64) // 10**6
183
- frames[tf_str] = resampled # Keep for sonar check
184
- numpy_frames[tf_str] = resampled[col_order].values
185
  time_indices[tf_str] = resampled.index
186
 
187
  ai_results = []
188
  valid_idx_5m = time_indices['5m']
189
- start_dt = valid_idx_5m[0] + pd.Timedelta(minutes=500)
190
- start_pos = valid_idx_5m.searchsorted(start_dt)
191
- total_steps = len(valid_idx_5m)
192
 
193
- # مصفوفات السونار للوصول السريع
194
- # نحتاج الوصول لقيم RSI و Volume بسرعة بدون Pandas Loop
195
- # سنستخدم مؤشرات الوقت للربط
196
 
197
- # --- THE TURBO LOOP ---
198
- for i in range(start_pos, len(valid_idx_5m)):
199
- current_time = valid_idx_5m[i]
200
-
201
- if i % (total_steps // 4) == 0:
202
- print(f" 🚀 Processing: {int((i/total_steps)*100)}%...", flush=True)
203
-
204
- # --- 📡 SONAR CHECK (The Accelerator) ---
205
- # نتحقق من فريم 1h أولاً (لأنه الأبطأ والأهم)
206
- idx_1h = time_indices['1h'].searchsorted(current_time, side='right') - 1
207
- if idx_1h < 60: continue
208
-
209
- # قراءة قيم السونار من الذاكرة (Pandas Access is okay here as it's single row via iloc/fast access or numpy)
210
- # لكن الأسرع هو قراءتها من الـ Numpy إذا جهزناها، أو من الـ DataFrame مباشرة.
211
- # للسرعة القصوى، سنفترض أننا نصل للصف في الـ DataFrame.
212
-
213
- row_1h = frames['1h'].iloc[idx_1h]
214
- sonar_rsi_1h = row_1h['sonar_rsi']
215
-
216
- # قاعدة السونار:
217
- # إذا RSI بين 35 و 65، فهذا غالباً نطاق عرضي ممل لا يستحق التشغيل الكامل
218
- # إلا إذا كان هناك انفجار في الفوليوم.
219
- # ملاحظة: نحن نريد "Wide Net" لذلك سنكون متساهلين، لكن ليس أغبياء.
220
- # نتجاهل فقط المناطق "الميتة تماماً".
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
221
 
222
- is_interesting = False
 
223
 
224
- # 1. RSI Extreme (Reversal potential)
225
- if sonar_rsi_1h < 42 or sonar_rsi_1h > 58:
226
- is_interesting = True
 
 
 
227
 
228
- # 2. Volume Spike (Breakout potential)
229
- # if row_1h['volume'] > row_1h['sonar_vol_ma'] * 1.2:
230
- # is_interesting = True
231
 
232
- # إذا لم يكن مثيراً للاهتمام في الـ 1h، نتحقق من الـ 15m (للأحداث السريعة)
233
- if not is_interesting:
234
- idx_15m = time_indices['15m'].searchsorted(current_time, side='right') - 1
235
- if idx_15m >= 60:
236
- row_15m = frames['15m'].iloc[idx_15m]
237
- if row_15m['sonar_rsi'] < 35 or row_15m['sonar_rsi'] > 65:
238
- is_interesting = True
239
- # if row_15m['volume'] > row_15m['sonar_vol_ma'] * 1.5:
240
- # is_interesting = True
241
-
242
- # 🛑 SKIP if boring
243
- if not is_interesting:
244
- continue
245
-
246
- # --- إذا وصلنا هنا، فالشمعة تستحق التحليل ---
247
 
248
- # قص البيانات (Slicing) - كما في V94
249
- idx_15m = time_indices['15m'].searchsorted(current_time, side='right') - 1
250
  ohlcv_1h = numpy_frames['1h'][idx_1h-60+1 : idx_1h+1].tolist()
251
  ohlcv_15m = numpy_frames['15m'][idx_15m-60+1 : idx_15m+1].tolist()
252
 
253
- logic_packet = {
254
- 'symbol': sym,
255
- 'ohlcv_1h': ohlcv_1h,
256
- 'ohlcv_15m': ohlcv_15m,
257
- 'change_24h': 0.0
258
- }
259
  if len(ohlcv_1h) >= 24:
260
  p_now = ohlcv_1h[-1][4]; p_old = ohlcv_1h[-24][4]
261
  if p_old > 0: logic_packet['change_24h'] = ((p_now - p_old) / p_old) * 100
@@ -265,13 +302,6 @@ class HeavyDutyBacktester:
265
  l1_score = logic_result.get('score', 0.0)
266
 
267
  if signal_type in ['BREAKOUT', 'REVERSAL']:
268
- idx_1m = time_indices['1m'].searchsorted(current_time, side='right') - 1
269
- idx_5m = i
270
- idx_4h = time_indices['4h'].searchsorted(current_time, side='right') - 1
271
- idx_1d = time_indices['1d'].searchsorted(current_time, side='right') - 1
272
-
273
- if idx_1m < 500 or idx_4h < 100: continue
274
-
275
  ohlcv_data = {
276
  '1m': numpy_frames['1m'][idx_1m-500+1 : idx_1m+1].tolist(),
277
  '5m': numpy_frames['5m'][idx_5m-200+1 : idx_5m+1].tolist(),
@@ -280,7 +310,6 @@ class HeavyDutyBacktester:
280
  '4h': numpy_frames['4h'][idx_4h-100+1 : idx_4h+1].tolist(),
281
  '1d': numpy_frames['1d'][idx_1d-50+1 : idx_1d+1].tolist()
282
  }
283
-
284
  current_price = ohlcv_data['5m'][-1][4]
285
  real_titan = 0.5
286
 
@@ -291,22 +320,17 @@ class HeavyDutyBacktester:
291
  except: pass
292
 
293
  ts_aligned = int(current_time.timestamp() // 60) * 60 * 1000
294
-
295
  ai_results.append({
296
- 'timestamp': ts_aligned,
297
- 'symbol': sym,
298
- 'close': current_price,
299
- 'real_titan': real_titan,
300
- 'signal_type': signal_type,
301
- 'l1_score': l1_score
302
  })
303
 
304
  dt = time.time() - t0
305
  if ai_results:
306
  pd.DataFrame(ai_results).to_pickle(scores_file)
307
- print(f" 💾 [{sym}] Saved {len(ai_results)} candidates. (Processed in {dt:.1f}s)", flush=True)
308
  else:
309
- print(f" ⚠️ [{sym}] No interesting candidates.", flush=True)
310
 
311
  del numpy_frames, time_indices, df_1m, candles, frames
312
  gc.collect()
@@ -320,7 +344,7 @@ class HeavyDutyBacktester:
320
  dt_end = datetime.strptime(self.force_end_date, "%Y-%m-%d").replace(tzinfo=timezone.utc)
321
  start_time_ms = int(dt_start.timestamp() * 1000)
322
  end_time_ms = int(dt_end.timestamp() * 1000)
323
- print(f"\n🚜 [Phase 1] Processing Era: {self.force_start_date} -> {self.force_end_date}")
324
  else:
325
  return
326
 
@@ -330,9 +354,9 @@ class HeavyDutyBacktester:
330
  if candles:
331
  await self._process_data_in_memory(sym, candles, start_time_ms, end_time_ms)
332
  else:
333
- print(f" ❌ Failed/Empty data for {sym}. Continuing...", flush=True)
334
  except Exception as e:
335
- print(f" ❌ SKIP: Error processing {sym}: {e}", flush=True)
336
  continue
337
  gc.collect()
338
 
@@ -497,27 +521,14 @@ class HeavyDutyBacktester:
497
 
498
  print("\n" + "="*60)
499
  print(f"🏆 CHAMPION REPORT [{target_regime}]:")
500
- print(f" 📅 Period: {self.force_start_date} -> {self.force_end_date}")
501
  print(f" 💰 Final Balance: ${best['final_balance']:,.2f}")
502
- print(f" 🚀 Net PnL: ${best['net_profit']:,.2f}")
503
- print("-" * 60)
504
- print(f" 📊 Total Trades: {best['total_trades']}")
505
- print(f" ✅ Winning Trades: {best['win_count']}")
506
- print(f" ❌ Losing Trades: {best['loss_count']}")
507
  print(f" 📈 Win Rate: {best['win_rate']:.1f}%")
508
- print("-" * 60)
509
- print(f" 🟢 Max Single Win: ${best['max_single_win']:.2f}")
510
- print(f" 🔴 Max Single Loss: ${best['max_single_loss']:.2f}")
511
- print(f" 🔥 Max Win Streak: {best['max_win_streak']} trades")
512
- print(f" 🧊 Max Loss Streak: {best['max_loss_streak']} trades")
513
- print(f" 📉 Max Drawdown: {best['max_drawdown']:.1f}%")
514
- print("-" * 60)
515
  print(f" ⚙️ Config: Titan={best['config']['w_titan']} | Struct={best['config']['w_struct']} | Thresh={best['config']['thresh']}")
516
  print("="*60)
517
  return best['config'], best
518
 
519
  async def run_strategic_optimization_task():
520
- print("\n🧪 [STRATEGIC BACKTEST] Vectorized Sonar Mode Initiated...")
521
  r2 = R2Service()
522
  dm = DataManager(None, None, r2)
523
  proc = MLProcessor(dm)
 
1
  # ============================================================
2
+ # 🧪 backtest_engine.py (V96.0 - GEM-Architect: Vectorized Logic Mirror)
3
  # ============================================================
4
 
5
  import asyncio
 
36
  self.TRADING_FEES = 0.001
37
  self.MAX_SLOTS = 4
38
 
39
+ # القائمة الكاملة (50 عملة)
40
  self.TARGET_COINS = [
41
  'SOL/USDT', 'XRP/USDT', 'DOGE/USDT', 'ADA/USDT', 'AVAX/USDT', 'LINK/USDT',
42
  'TON/USDT', 'INJ/USDT', 'APT/USDT', 'OP/USDT', 'ARB/USDT', 'SUI/USDT',
 
53
  self.force_end_date = None
54
 
55
  if not os.path.exists(CACHE_DIR): os.makedirs(CACHE_DIR)
56
+ print(f"🧪 [Backtest V96.0] Vectorized Logic Mirror (Exact L1 Simulation).")
57
 
58
  def set_date_range(self, start_str, end_str):
59
  self.force_start_date = start_str
 
63
  if df.empty: return []
64
  return df[['timestamp', 'open', 'high', 'low', 'close', 'volume']].values.tolist()
65
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
66
  # ==============================================================
67
  # ⚡ FAST DATA DOWNLOADER
68
  # ==============================================================
 
104
  unique_candles.append(c)
105
  seen.add(c[0])
106
  unique_candles.sort(key=lambda x: x[0])
107
+ print(f" ✅ Downloaded {len(unique_candles)} candles.", flush=True)
108
  return unique_candles
109
 
110
  # ==============================================================
111
+ # 🏎️ VECTORIZED INDICATOR CALCULATION
112
+ # ==============================================================
113
+ def _calculate_indicators_vectorized(self, df):
114
+ """
115
+ حساب المؤشرات الفنية لكامل البيانات دفعة واحدة باستخدام Pandas Vectorization.
116
+ هذا يطابق منطق DataManager._calc_indicators بالضبط ولكن أسرع بـ 1000 مرة.
117
+ """
118
+ # RSI
119
+ delta = df['close'].diff()
120
+ gain = (delta.where(delta > 0, 0)).rolling(window=14).mean()
121
+ loss = (-delta.where(delta < 0, 0)).rolling(window=14).mean()
122
+ rs = gain / loss
123
+ df['rsi'] = 100 - (100 / (1 + rs))
124
+
125
+ # EMA
126
+ df['ema20'] = df['close'].ewm(span=20, adjust=False).mean()
127
+ df['ema50'] = df['close'].ewm(span=50, adjust=False).mean()
128
+
129
+ # ATR (Simplified Vectorized)
130
+ high_low = df['high'] - df['low']
131
+ high_close = (df['high'] - df['close'].shift()).abs()
132
+ low_close = (df['low'] - df['close'].shift()).abs()
133
+ ranges = pd.concat([high_low, high_close, low_close], axis=1)
134
+ true_range = ranges.max(axis=1)
135
+ df['atr'] = true_range.rolling(14).mean()
136
+
137
+ # Volume MA
138
+ df['vol_ma20'] = df['volume'].rolling(window=20).mean()
139
+
140
+ df.fillna(0, inplace=True)
141
+ return df
142
+
143
+ # ==============================================================
144
+ # 🧠 CPU PROCESSING (VECTORIZED LOGIC MIRROR)
145
  # ==============================================================
146
  async def _process_data_in_memory(self, sym, candles, start_ms, end_ms):
147
  safe_sym = sym.replace('/', '_')
 
152
  print(f" 📂 [{sym}] Data Exists -> Skipping.")
153
  return
154
 
155
+ print(f" ⚙️ [CPU] Analyzing {sym}...", flush=True)
156
  t0 = time.time()
157
 
158
  # 1. Prepare Pandas
 
163
  df_1m.set_index('datetime', inplace=True)
164
  df_1m = df_1m.sort_index()
165
 
166
+ # 2. Resample & Calculate Indicators (ONCE)
167
  frames = {}
168
  numpy_frames = {}
169
  time_indices = {}
170
  agg_dict = {'open': 'first', 'high': 'max', 'low': 'min', 'close': 'last', 'volume': 'sum'}
171
 
172
+ # 1m Setup
173
  frames['1m'] = df_1m.copy()
174
  frames['1m']['timestamp'] = frames['1m'].index.floor('1min').astype(np.int64) // 10**6
175
  col_order = ['timestamp', 'open', 'high', 'low', 'close', 'volume']
 
180
  for tf_str, tf_code in [('5m', '5T'), ('15m', '15T'), ('1h', '1h'), ('4h', '4h'), ('1d', '1D')]:
181
  resampled = df_1m.resample(tf_code).agg(agg_dict).dropna()
182
 
183
+ # 🔥🔥 Calculate Indicators HERE (Vectorized) 🔥🔥
 
184
  if tf_str in ['15m', '1h']:
185
+ resampled = self._calculate_indicators_vectorized(resampled)
186
+
 
187
  resampled['timestamp'] = resampled.index.astype(np.int64) // 10**6
188
+ frames[tf_str] = resampled
189
+ numpy_frames[tf_str] = resampled[col_order].values # Raw data for passing
190
  time_indices[tf_str] = resampled.index
191
 
192
  ai_results = []
193
  valid_idx_5m = time_indices['5m']
 
 
 
194
 
195
+ # 3. 🔥 VECTORIZED LOGIC MATCHING (The Speed Force) 🔥
196
+ # بدلاً من حلقة تكرار عمياء، نجد "أماكن الاهتمام" فوراً باستخدام المنطق البولياني
197
+ # هذا يطابق شروط DataManager._apply_logic_tree حرفياً
198
 
199
+ # نحتاج لمطابقة وقت الـ 5m مع الـ 1h و 15m
200
+ # سنقوم بعمل reindex للـ 1h و 15m ليتطابق مع الـ 5m (Forward Fill)
201
+ # هذا يسمح لنا بمقارنة الأعمدة مباشرة
202
+
203
+ df_5m_aligned = frames['5m'].copy()
204
+
205
+ # دمج بيانات الـ 1h مع الـ 5m (Matching times)
206
+ df_1h_aligned = frames['1h'].reindex(frames['5m'].index, method='ffill')
207
+ df_15m_aligned = frames['15m'].reindex(frames['5m'].index, method='ffill')
208
+
209
+ # --- تطبيق شروط L1 (Breakout & Reversal) ---
210
+
211
+ # الشروط المشتركة (Common Filters from V15.2)
212
+ # 1. 4H Change calculation (approx from 1H data)
213
+ # shift(4) في فريم الساعة يقابل shift(48) في فريم 5 دقائق (تقريباً)
214
+ # للأمان نستخدم بيانات الساعة المحاذية
215
+ change_4h = ((df_1h_aligned['close'] - df_1h_aligned['close'].shift(4)) / df_1h_aligned['close'].shift(4)) * 100
216
+
217
+ # فلتر: ممنوع أكثر من 8% صعود في 4 ساعات
218
+ cond_not_pump = change_4h <= 8.0
219
+ # فلتر: RSI 1H ممنوع فوق 70
220
+ cond_rsi_1h_safe = df_1h_aligned['rsi'] <= 70
221
+ # فلتر: الامتداد (Deviation)
222
+ deviation = (df_1h_aligned['close'] - df_1h_aligned['ema20']) / df_1h_aligned['atr']
223
+ cond_deviation_safe = deviation <= 1.8
224
+
225
+ filters_pass = cond_not_pump & cond_rsi_1h_safe & cond_deviation_safe
226
+
227
+ # --- Breakout Logic ---
228
+ # 1. Bullish Structure (1H)
229
+ bullish_1h = (df_1h_aligned['ema20'] > df_1h_aligned['ema50']) | (df_1h_aligned['close'] > df_1h_aligned['ema20'])
230
+ # 2. RSI 1H Room (45-68)
231
+ rsi_1h_ok = (df_1h_aligned['rsi'] >= 45) & (df_1h_aligned['rsi'] <= 68)
232
+ # 3. 15M Close > EMA20
233
+ close_above_ema_15m = df_15m_aligned['close'] >= df_15m_aligned['ema20']
234
+ # 4. Volume 15M Spike
235
+ vol_spike_15m = df_15m_aligned['volume'] >= (1.5 * df_15m_aligned['vol_ma20'])
236
+
237
+ is_breakout = filters_pass & bullish_1h & rsi_1h_ok & close_above_ema_15m & vol_spike_15m
238
+
239
+ # --- Reversal Logic ---
240
+ # 1. RSI 1H Oversold (20-40)
241
+ rsi_oversold = (df_1h_aligned['rsi'] >= 20) & (df_1h_aligned['rsi'] <= 40)
242
+ # 2. Drop in price (change_4h <= -2)
243
+ price_drop = change_4h <= -2.0
244
+ # 3. Hammer/Rejection on 15M (Vectorized Approximation)
245
+ # Hammer: Lower wick > 1.5 * Body
246
+ body = (df_15m_aligned['close'] - df_15m_aligned['open']).abs()
247
+ lower_wick = df_15m_aligned[['open', 'close']].min(axis=1) - df_15m_aligned['low']
248
+ is_hammer = lower_wick > (body * 1.5)
249
+ is_green = df_15m_aligned['close'] > df_15m_aligned['open']
250
+
251
+ is_reversal = filters_pass & rsi_oversold & price_drop & (is_hammer | is_green)
252
+
253
+ # --- Combined Mask ---
254
+ # هذه هي اللحظات التي تستحق التحليل فقط!
255
+ valid_mask = is_breakout | is_reversal
256
+ valid_indices = df_5m_aligned[valid_mask].index
257
+
258
+ # --------------------------------------------------------
259
+
260
+ # 4. Loop ONLY on Valid Indices (The massive speedup)
261
+ # بدلاً من 129,000 لفة، سنجد ربما 2,000 - 5,000 لفة فقط.
262
+
263
+ start_dt = df_1m.index[0] + pd.Timedelta(minutes=500)
264
+ final_valid_indices = [t for t in valid_indices if t >= start_dt]
265
+
266
+ total_hits = len(final_valid_indices)
267
+ print(f" 🎯 Found {total_hits} potential setups. Running Titan...", flush=True)
268
+
269
+ for i, current_time in enumerate(final_valid_indices):
270
+ # قص البيانات (Slicing) لتمريرها للنماذج
271
+ # نستخدم searchsorted للسرعة القصوى
272
 
273
+ # نحتاج تحويل timestamp الـ index إلى مكان في الـ numpy arrays
274
+ # ملاحظة: time_indices['1m'] مرتب، لذا searchsorted يعمل
275
 
276
+ idx_1m = time_indices['1m'].searchsorted(current_time, side='right') - 1
277
+ idx_5m = time_indices['5m'].searchsorted(current_time, side='right') - 1
278
+ idx_15m = time_indices['15m'].searchsorted(current_time, side='right') - 1
279
+ idx_1h = time_indices['1h'].searchsorted(current_time, side='right') - 1
280
+ idx_4h = time_indices['4h'].searchsorted(current_time, side='right') - 1
281
+ idx_1d = time_indices['1d'].searchsorted(current_time, side='right') - 1
282
 
283
+ if idx_1m < 500 or idx_4h < 100: continue
 
 
284
 
285
+ # استخراج نوع الإشارة (لأننا دمجناهم في valid_mask)
286
+ # نعيد التحقق السريع لنعرف النوع
287
+ # ملاحظة: الوصول هنا سريع جداً لأننا نعرف التوقيت
288
+ # أو يمكننا الاعتماد على أن DataManager سيعيد النوع الصحيح
 
 
 
 
 
 
 
 
 
 
 
289
 
290
+ # نجهز الـ Packet ونرسلها لـ DataManager للتأكيد النهائي واستخراج السكور
291
+ # هذا يضمن التطابق 100%
292
  ohlcv_1h = numpy_frames['1h'][idx_1h-60+1 : idx_1h+1].tolist()
293
  ohlcv_15m = numpy_frames['15m'][idx_15m-60+1 : idx_15m+1].tolist()
294
 
295
+ logic_packet = {'symbol': sym, 'ohlcv_1h': ohlcv_1h, 'ohlcv_15m': ohlcv_15m, 'change_24h': 0.0}
 
 
 
 
 
296
  if len(ohlcv_1h) >= 24:
297
  p_now = ohlcv_1h[-1][4]; p_old = ohlcv_1h[-24][4]
298
  if p_old > 0: logic_packet['change_24h'] = ((p_now - p_old) / p_old) * 100
 
302
  l1_score = logic_result.get('score', 0.0)
303
 
304
  if signal_type in ['BREAKOUT', 'REVERSAL']:
 
 
 
 
 
 
 
305
  ohlcv_data = {
306
  '1m': numpy_frames['1m'][idx_1m-500+1 : idx_1m+1].tolist(),
307
  '5m': numpy_frames['5m'][idx_5m-200+1 : idx_5m+1].tolist(),
 
310
  '4h': numpy_frames['4h'][idx_4h-100+1 : idx_4h+1].tolist(),
311
  '1d': numpy_frames['1d'][idx_1d-50+1 : idx_1d+1].tolist()
312
  }
 
313
  current_price = ohlcv_data['5m'][-1][4]
314
  real_titan = 0.5
315
 
 
320
  except: pass
321
 
322
  ts_aligned = int(current_time.timestamp() // 60) * 60 * 1000
 
323
  ai_results.append({
324
+ 'timestamp': ts_aligned, 'symbol': sym, 'close': current_price,
325
+ 'real_titan': real_titan, 'signal_type': signal_type, 'l1_score': l1_score
 
 
 
 
326
  })
327
 
328
  dt = time.time() - t0
329
  if ai_results:
330
  pd.DataFrame(ai_results).to_pickle(scores_file)
331
+ print(f" 💾 [{sym}] Saved {len(ai_results)} verified signals. (Compute: {dt:.1f}s)", flush=True)
332
  else:
333
+ print(f" ⚠️ [{sym}] No signals.", flush=True)
334
 
335
  del numpy_frames, time_indices, df_1m, candles, frames
336
  gc.collect()
 
344
  dt_end = datetime.strptime(self.force_end_date, "%Y-%m-%d").replace(tzinfo=timezone.utc)
345
  start_time_ms = int(dt_start.timestamp() * 1000)
346
  end_time_ms = int(dt_end.timestamp() * 1000)
347
+ print(f"\n🚜 [Phase 1] Era: {self.force_start_date} -> {self.force_end_date}")
348
  else:
349
  return
350
 
 
354
  if candles:
355
  await self._process_data_in_memory(sym, candles, start_time_ms, end_time_ms)
356
  else:
357
+ print(f" ❌ Failed/Empty data for {sym}.", flush=True)
358
  except Exception as e:
359
+ print(f" ❌ SKIP {sym}: {e}", flush=True)
360
  continue
361
  gc.collect()
362
 
 
521
 
522
  print("\n" + "="*60)
523
  print(f"🏆 CHAMPION REPORT [{target_regime}]:")
 
524
  print(f" 💰 Final Balance: ${best['final_balance']:,.2f}")
 
 
 
 
 
525
  print(f" 📈 Win Rate: {best['win_rate']:.1f}%")
 
 
 
 
 
 
 
526
  print(f" ⚙️ Config: Titan={best['config']['w_titan']} | Struct={best['config']['w_struct']} | Thresh={best['config']['thresh']}")
527
  print("="*60)
528
  return best['config'], best
529
 
530
  async def run_strategic_optimization_task():
531
+ print("\n🧪 [STRATEGIC BACKTEST] Vectorized Logic Mirror Initiated...")
532
  r2 = R2Service()
533
  dm = DataManager(None, None, r2)
534
  proc = MLProcessor(dm)