fix: make sync endpoint market-aware — prevent US scan results from overwriting BIST file
Root cause: /api/trading/sync always wrote scan_results to
paper_trading/bist100_scan_results.json regardless of market_id.
When the US worker synced its data, it overwrote the BIST scan file,
causing both BIST and US eligible pages to show US stocks.
Changes:
- Add market_id field to TradingSyncPayload
- Use get_scan_results_path(market_id) for correct file paths
- Store per-market scan_results in _remote_cache (scan_results:bist, scan_results:us)
- Update _get_trading_status() to read from scan_results:bist cache key
This view is limited to 50 files because it contains too many changes. See raw diff
- _archive_scripts/_backfill_runlog.py +61 -0
- _archive_scripts/_backfill_trades.py +147 -0
- _archive_scripts/_diag.py +30 -0
- ai/predictions_api.py +111 -58
- analysis/walk_forward_backtest.py +30 -6
- com.borsa.scan-bist.plist +46 -0
- com.borsa.scan-us.plist +46 -0
- com.borsa.trading-worker-us.plist +57 -0
- huggingface-space/_test_us_audit.py +595 -0
- huggingface-space/ai/predictions_api.py +111 -58
- huggingface-space/analysis/walk_forward_backtest.py +30 -6
- huggingface-space/app.py +491 -826
- huggingface-space/data/index_constituents.py +1 -1
- huggingface-space/nextjs-app/package.json +1 -2
- huggingface-space/nextjs-app/scripts/trading-auth-smoke.mjs +0 -250
- huggingface-space/nextjs-app/src/app/ai-analysis/page.tsx +53 -62
- huggingface-space/nextjs-app/src/app/announcements/page.tsx +21 -29
- huggingface-space/nextjs-app/src/app/api/eligible/route.ts +25 -163
- huggingface-space/nextjs-app/src/app/api/health/route.ts +2 -2
- huggingface-space/nextjs-app/src/app/api/ml-predictions/route.ts +7 -84
- huggingface-space/nextjs-app/src/app/api/trading/route.ts +59 -254
- huggingface-space/nextjs-app/src/app/auto-trading/page.tsx +133 -410
- huggingface-space/nextjs-app/src/app/eligible/page.tsx +46 -54
- huggingface-space/nextjs-app/src/app/ml-scan/page.tsx +55 -226
- huggingface-space/nextjs-app/src/app/page.tsx +1 -8
- huggingface-space/nextjs-app/src/app/stocks/page.tsx +71 -95
- huggingface-space/nextjs-app/src/app/us-market/page.tsx +0 -556
- huggingface-space/nextjs-app/src/components/MLPredictionCard.tsx +3 -6
- huggingface-space/nextjs-app/src/components/Navigation.tsx +1 -7
- huggingface-space/nextjs-app/src/components/TopMLPredictions.tsx +9 -22
- huggingface-space/nextjs-app/src/lib/api-auth.ts +2 -24
- huggingface-space/requirements.txt +11 -11
- huggingface-space/run_bist100_scan.py +24 -508
- huggingface-space/run_us_scan.py +31 -447
- huggingface-space/start.sh +33 -106
- huggingface-space/sync_eligibility.py +45 -25
- huggingface-space/telegram_bot.py +593 -710
- huggingface-space/test_smoke.py +1 -1
- huggingface-space/trading/__init__.py +1 -0
- huggingface-space/trading/auto_trader.py +31 -38
- huggingface-space/trading/broker_base.py +20 -9
- huggingface-space/trading/circuit_breaker.py +1 -0
- huggingface-space/trading/daily_signals.py +33 -16
- huggingface-space/trading/db_store.py +204 -1
- huggingface-space/trading/market_registry.py +18 -0
- huggingface-space/trading/midas_broker.py +1 -1
- huggingface-space/trading/model_risk.py +97 -19
- huggingface-space/trading/performance_scorecard.py +428 -0
- huggingface-space/trading/risk_gate.py +54 -4
- huggingface-space/trading/scanner_engine.py +403 -0
_archive_scripts/_backfill_runlog.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""One-time backfill of daily_run_log from daemon stderr log history.

Parses "Cycle: <STATUS>" lines out of the trading worker's stderr log,
keeps the highest-priority status per calendar date, and inserts one row
per missing date into the daily_run_log table (idempotent per date).
"""
import re
import sqlite3
from collections import defaultdict

LOG_FILE = "/Users/onerozbey/Library/Logs/borsa-trading-worker/worker_stderr.log"
DB_PATH = "/Users/onerozbey/borsa_uygulamasi_runtime/huggingface-space/paper_trading/trading.db"

# Parse "Cycle: OK", "Cycle: SKIPPED", etc. from log lines
pattern = re.compile(r"^(\d{4}-\d{2}-\d{2}) [\d:,]+ .* Cycle: (\w+)")

# Explicit ranking implementing the documented priority
# OK > SKIPPED > NO_ELIGIBLE_STOCKS (> anything unknown).
# BUG FIX: the previous check (`status == "OK" or date not in day_status`)
# only ever promoted OK — a later SKIPPED could never replace an earlier
# NO_ELIGIBLE_STOCKS, contradicting the stated priority.
_STATUS_RANK = {"OK": 3, "SKIPPED": 2, "NO_ELIGIBLE_STOCKS": 1}

day_status = {}  # date -> highest-priority status seen that day
with open(LOG_FILE) as f:
    for line in f:
        m = pattern.match(line)
        if not m:
            continue
        date, status = m.group(1), m.group(2)
        current = day_status.get(date)
        if current is None or _STATUS_RANK.get(status, 0) > _STATUS_RANK.get(current, 0):
            day_status[date] = status

print(f"Found {len(day_status)} unique dates with cycles")

conn = sqlite3.connect(DB_PATH)
cur = conn.cursor()

# Ensure table exists
cur.execute("""
CREATE TABLE IF NOT EXISTS daily_run_log (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    date TEXT NOT NULL,
    market_id TEXT NOT NULL DEFAULT 'bist',
    status TEXT NOT NULL,
    trades_executed INTEGER DEFAULT 0,
    elapsed_sec REAL DEFAULT 0,
    equity REAL,
    cash REAL,
    model_safe INTEGER,
    cycle_detail TEXT,
    created_at TEXT DEFAULT (datetime('now'))
)
""")

# Check existing entries so the backfill never duplicates a date
existing = set(r[0] for r in cur.execute("SELECT DISTINCT date FROM daily_run_log").fetchall())
print(f"Already have {len(existing)} dates in run_log")

inserted = 0
for date in sorted(day_status):
    if date not in existing:
        cur.execute(
            "INSERT INTO daily_run_log (date, market_id, status) VALUES (?, 'bist', ?)",
            (date, day_status[date]),
        )
        inserted += 1

conn.commit()
conn.close()
print(f"Backfilled {inserted} run log entries")
print(f"Total run dates: {len(day_status)}")
_archive_scripts/_backfill_trades.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""One-time: recover 12 closed trades from daemon logs into runtime DB."""
import sqlite3, pathlib, datetime as _dt

# Runtime paper-trading SQLite database the recovered trades are written into.
RUNTIME_DB = pathlib.Path.home() / "borsa_uygulamasi_runtime/huggingface-space/paper_trading/trading.db"

# Raw fills parsed from daemon stderr log (all 29 entries, in chronological order)
# Each tuple: (date ISO string, "BUY"/"SELL", share qty, BIST symbol,
# fill price, commission). Chronological order matters: match_trades pairs
# each SELL with the earliest still-open BUY of the same (symbol, qty).
FILLS = [
    # (date, action, qty, symbol, price, commission)
    ("2026-02-25", "BUY", 99, "RALYH", 150.7753, 15.67),
    ("2026-02-26", "BUY", 49, "THYAO", 306.4031, 15.76),
    ("2026-02-27", "BUY", 1597, "IZENR", 9.4347, 15.82),
    ("2026-03-02", "SELL", 99, "RALYH", 147.4263, 15.32),
    ("2026-03-02", "SELL", 49, "THYAO", 287.6061, 14.80),
    ("2026-03-02", "SELL",1597, "IZENR", 8.9055, 14.93),
    ("2026-03-02", "BUY", 1484, "AKSA", 9.8749, 15.39),
    ("2026-03-02", "BUY", 1648, "IZENR", 8.9145, 15.43),
    ("2026-03-02", "BUY", 79, "ASTOR", 183.3917, 15.21),
    ("2026-03-02", "BUY", 50, "THYAO", 287.8939, 15.11),
    ("2026-03-03", "SELL",1648, "IZENR", 8.9455, 15.48),
    ("2026-03-04", "SELL", 50, "THYAO", 283.8580, 14.90),
    ("2026-03-04", "BUY", 185, "AKBNK", 78.5392, 15.26),
    ("2026-03-05", "SELL", 79, "ASTOR", 172.2139, 14.29),
    ("2026-03-05", "SELL", 185, "AKBNK", 80.9595, 15.73),
    ("2026-03-05", "BUY", 84, "ASTOR", 172.3862, 15.20),
    ("2026-03-05", "BUY", 98, "RALYH", 147.2736, 15.15),
    ("2026-03-05", "BUY", 2702, "BTCIM", 5.4027, 15.33),
    ("2026-03-06", "SELL",1484, "AKSA", 10.0150, 15.61),
    ("2026-03-06", "SELL", 98, "RALYH", 143.7281, 14.79),
    ("2026-03-06", "BUY", 51, "THYAO", 284.1420, 15.22),
    ("2026-03-09", "SELL", 51, "THYAO", 267.1164, 14.30),
    ("2026-03-11", "SELL", 84, "ASTOR", 193.7031, 17.08),
    ("2026-03-12", "SELL",2702, "BTCIM", 6.1269, 17.38),
    # Mar 30 BUYs already in DB as open trades — skip
]
def match_trades(fills):
    """FIFO matching: pair BUY fills with subsequent SELL fills by (symbol, qty)."""
    pending_buys = []  # BUY fills still waiting for a matching SELL
    closed = []

    for fill in fills:
        _date, action, qty, sym, _price, _comm = fill
        if action == "BUY":
            pending_buys.append(fill)
            continue
        # SELL: locate the earliest open BUY with the same symbol and quantity.
        match_idx = next(
            (i for i, buy in enumerate(pending_buys)
             if buy[3] == sym and buy[2] == qty),
            None,
        )
        if match_idx is not None:
            closed.append((pending_buys.pop(match_idx), fill))
        # SELLs with no matching BUY are silently dropped (same as original).
    return closed
+
def build_trade_record(entry, exit_):
|
| 57 |
+
e_date, _, qty, sym, e_price, e_comm = entry
|
| 58 |
+
x_date, _, _, _, x_price, x_comm = exit_
|
| 59 |
+
|
| 60 |
+
gross_pnl = round((x_price - e_price) * qty, 4)
|
| 61 |
+
net_pnl = round(gross_pnl - e_comm - x_comm, 4)
|
| 62 |
+
return_pct = round((x_price - e_price) / e_price * 100, 4)
|
| 63 |
+
|
| 64 |
+
d1 = _dt.date.fromisoformat(e_date)
|
| 65 |
+
d2 = _dt.date.fromisoformat(x_date)
|
| 66 |
+
holding_days = (d2 - d1).days
|
| 67 |
+
|
| 68 |
+
return {
|
| 69 |
+
"symbol": sym,
|
| 70 |
+
"side": "BUY",
|
| 71 |
+
"entry_price": e_price,
|
| 72 |
+
"exit_price": x_price,
|
| 73 |
+
"quantity": qty,
|
| 74 |
+
"entry_date": e_date,
|
| 75 |
+
"exit_date": x_date,
|
| 76 |
+
"entry_commission": e_comm,
|
| 77 |
+
"exit_commission": x_comm,
|
| 78 |
+
"gross_pnl": gross_pnl,
|
| 79 |
+
"net_pnl": net_pnl,
|
| 80 |
+
"return_pct": return_pct,
|
| 81 |
+
"holding_days": holding_days,
|
| 82 |
+
"signal_confidence": 0.0,
|
| 83 |
+
"predicted_return": 0.0,
|
| 84 |
+
"entry_reason": "ML_BUY_SIGNAL",
|
| 85 |
+
"exit_reason": "SIGNAL_SELL",
|
| 86 |
+
"is_closed": 1,
|
| 87 |
+
"created_at": f"{e_date} 10:30:00",
|
| 88 |
+
"updated_at": f"{x_date} 10:30:00",
|
| 89 |
+
}
|
| 90 |
+
|
| 91 |
+
def main():
|
| 92 |
+
pairs = match_trades(FILLS)
|
| 93 |
+
print(f"Matched {len(pairs)} closed trades from daemon logs\n")
|
| 94 |
+
|
| 95 |
+
records = []
|
| 96 |
+
total_net = 0.0
|
| 97 |
+
winners = 0
|
| 98 |
+
for entry, exit_ in pairs:
|
| 99 |
+
r = build_trade_record(entry, exit_)
|
| 100 |
+
records.append(r)
|
| 101 |
+
total_net += r["net_pnl"]
|
| 102 |
+
if r["net_pnl"] > 0:
|
| 103 |
+
winners += 1
|
| 104 |
+
print(f" {r['symbol']:6s} {r['entry_date']}→{r['exit_date']} "
|
| 105 |
+
f"net={r['net_pnl']:+.2f} ret={r['return_pct']:+.2f}% "
|
| 106 |
+
f"hold={r['holding_days']}d")
|
| 107 |
+
|
| 108 |
+
losers = len(records) - winners
|
| 109 |
+
win_pnl = sum(r["net_pnl"] for r in records if r["net_pnl"] > 0)
|
| 110 |
+
loss_pnl = abs(sum(r["net_pnl"] for r in records if r["net_pnl"] <= 0))
|
| 111 |
+
pf = win_pnl / loss_pnl if loss_pnl > 0 else 0
|
| 112 |
+
|
| 113 |
+
print(f"\nSummary: {len(records)} trades, {winners}W/{losers}L")
|
| 114 |
+
print(f" WR={winners/len(records)*100:.1f}% PF={pf:.2f} Net={total_net:+.2f}")
|
| 115 |
+
|
| 116 |
+
# Insert into runtime DB
|
| 117 |
+
db = sqlite3.connect(str(RUNTIME_DB))
|
| 118 |
+
cur = db.cursor()
|
| 119 |
+
|
| 120 |
+
# Check for existing closed trades to avoid duplicates
|
| 121 |
+
cur.execute("SELECT COUNT(*) FROM trades WHERE is_closed=1")
|
| 122 |
+
existing = cur.fetchone()[0]
|
| 123 |
+
if existing > 0:
|
| 124 |
+
print(f"\n⚠ Already {existing} closed trades in DB. Skipping insert.")
|
| 125 |
+
db.close()
|
| 126 |
+
return
|
| 127 |
+
|
| 128 |
+
cols = list(records[0].keys())
|
| 129 |
+
placeholders = ",".join(["?"] * len(cols))
|
| 130 |
+
col_names = ",".join(cols)
|
| 131 |
+
|
| 132 |
+
for r in records:
|
| 133 |
+
vals = [r[c] for c in cols]
|
| 134 |
+
cur.execute(f"INSERT INTO trades ({col_names}) VALUES ({placeholders})", vals)
|
| 135 |
+
|
| 136 |
+
db.commit()
|
| 137 |
+
print(f"\n✅ Inserted {len(records)} closed trades into {RUNTIME_DB}")
|
| 138 |
+
|
| 139 |
+
# Verify
|
| 140 |
+
cur.execute("SELECT COUNT(*) FROM trades WHERE is_closed=1")
|
| 141 |
+
print(f" Closed trades in DB: {cur.fetchone()[0]}")
|
| 142 |
+
cur.execute("SELECT COUNT(*) FROM trades WHERE is_closed=0")
|
| 143 |
+
print(f" Open trades in DB: {cur.fetchone()[0]}")
|
| 144 |
+
db.close()
|
| 145 |
+
|
| 146 |
+
if __name__ == "__main__":
|
| 147 |
+
main()
|
_archive_scripts/_diag.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import sqlite3

# Ad-hoc diagnostic dump of the paper-trading SQLite DB: recent run-log
# rows, daily snapshots, prediction-related state keys, every trade, and
# the latest signal entries.
conn = sqlite3.connect('paper_trading/trading.db')
cur = conn.cursor()

run_rows = cur.execute('SELECT * FROM daily_run_log ORDER BY date DESC LIMIT 10').fetchall()
print('=== DAILY RUN LOG ===')
for row in run_rows:
    print(row)

snapshot_rows = cur.execute('SELECT date, equity, cash, positions_count FROM daily_snapshots ORDER BY date DESC LIMIT 10').fetchall()
print('\n=== DAILY SNAPSHOTS ===')
for row in snapshot_rows:
    print(row)

state_rows = cur.execute("SELECT key, value FROM state WHERE key LIKE '%predict%' OR key LIKE '%model%' OR key LIKE '%bootstrap%'").fetchall()
print('\n=== PREDICTION STATE ===')
for row in state_rows:
    print(row)

trade_rows = cur.execute('SELECT id, symbol, side, entry_date, exit_date, is_closed, exit_reason, gross_pnl FROM trades ORDER BY id').fetchall()
print('\n=== ALL TRADES ===')
for row in trade_rows:
    print(row)

signal_count = cur.execute('SELECT COUNT(*) FROM signals_log').fetchone()[0]
print('\n=== SIGNALS LOG COUNT:', signal_count, '===')
for row in cur.execute('SELECT date, symbol, action, score FROM signals_log ORDER BY date DESC LIMIT 5').fetchall():
    print(row)

conn.close()
|
ai/predictions_api.py
CHANGED
|
@@ -79,35 +79,31 @@ def _compute_confidence(r2: float, direction_correct: float) -> float:
|
|
| 79 |
r2_c = float(r2) if np.isfinite(r2) else 0.0
|
| 80 |
dir_c = float(direction_correct) if np.isfinite(direction_correct) else 0.5
|
| 81 |
|
| 82 |
-
#
|
| 83 |
-
|
| 84 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 85 |
r2_norm = min(1.0, r2_c / 0.25)
|
| 86 |
else:
|
| 87 |
-
|
| 88 |
-
# R²=-0.5→0.10, R²=-2.0→0.0
|
| 89 |
-
r2_norm = max(0.0, 0.20 + r2_c * 0.10)
|
| 90 |
|
| 91 |
-
# ── Direction component (
|
| 92 |
-
#
|
| 93 |
-
if dir_c
|
|
|
|
|
|
|
| 94 |
dir_norm = 1.0
|
| 95 |
-
elif dir_c >= 0.50:
|
| 96 |
-
dir_norm = 0.2 + (dir_c - 0.50) / 0.15 * 0.8
|
| 97 |
else:
|
| 98 |
-
|
| 99 |
-
dir_norm = max(0.0, dir_c / 0.50 * 0.2)
|
| 100 |
-
|
| 101 |
-
# ── Base component (30% weight) ──
|
| 102 |
-
# Having a trained multi-model ensemble is itself valuable.
|
| 103 |
-
# This ensures minimum ~25-30 confidence for any working model.
|
| 104 |
-
base = 0.85
|
| 105 |
|
| 106 |
-
raw = r2_norm * 0.
|
| 107 |
|
| 108 |
-
return max(
|
| 109 |
except Exception:
|
| 110 |
-
return
|
| 111 |
|
| 112 |
|
| 113 |
def _apply_shrinkage(predicted_change_pct: float, confidence_pct: float, days_ahead: int) -> float:
|
|
@@ -330,6 +326,10 @@ def predict_stock_for_api(symbol, days_ahead=5, model_type='ensemble', market_id
|
|
| 330 |
# Current price is the most recent close; prediction is for +days_ahead
|
| 331 |
current_price = float(df['Close'].iloc[-1])
|
| 332 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 333 |
# Final safety: ensure finite values for sklearn
|
| 334 |
X = np.nan_to_num(X, nan=0.0, posinf=0.0, neginf=0.0)
|
| 335 |
y = np.nan_to_num(y, nan=0.0, posinf=0.0, neginf=0.0)
|
|
@@ -345,66 +345,108 @@ def predict_stock_for_api(symbol, days_ahead=5, model_type='ensemble', market_id
|
|
| 345 |
val_start = split_idx # fallback if not enough data for purge
|
| 346 |
X_train, X_test = X_scaled[:split_idx], X_scaled[val_start:]
|
| 347 |
y_train, y_test = y[:split_idx], y[val_start:]
|
| 348 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 349 |
# Model seç ve eğit
|
| 350 |
if model_type == 'xgboost' and XGBOOST_AVAILABLE:
|
| 351 |
model = xgb.XGBRegressor(
|
| 352 |
-
n_estimators=
|
| 353 |
-
max_depth=
|
| 354 |
-
learning_rate=0.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 355 |
random_state=42,
|
| 356 |
-
n_jobs=-1
|
| 357 |
)
|
| 358 |
elif model_type == 'lightgbm' and LIGHTGBM_AVAILABLE:
|
| 359 |
model = lgb.LGBMRegressor(
|
| 360 |
-
n_estimators=
|
| 361 |
-
max_depth=
|
| 362 |
-
learning_rate=0.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 363 |
random_state=42,
|
| 364 |
n_jobs=-1,
|
| 365 |
-
verbose=-1
|
| 366 |
)
|
| 367 |
elif model_type == 'rf':
|
| 368 |
model = RandomForestRegressor(
|
| 369 |
-
n_estimators=
|
| 370 |
-
max_depth=
|
|
|
|
|
|
|
|
|
|
| 371 |
random_state=42,
|
| 372 |
-
n_jobs=-1
|
| 373 |
)
|
| 374 |
elif model_type == 'ensemble':
|
| 375 |
-
# Ensemble: XGBoost + LightGBM + RandomForest
|
| 376 |
models = []
|
| 377 |
|
| 378 |
if XGBOOST_AVAILABLE:
|
| 379 |
xgb_model = xgb.XGBRegressor(
|
| 380 |
-
n_estimators=
|
| 381 |
-
max_depth=
|
| 382 |
-
learning_rate=0.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 383 |
random_state=42,
|
| 384 |
-
n_jobs=-1
|
| 385 |
)
|
| 386 |
-
xgb_model.fit(X_train, y_train)
|
| 387 |
models.append(xgb_model)
|
| 388 |
|
| 389 |
if LIGHTGBM_AVAILABLE:
|
| 390 |
lgb_model = lgb.LGBMRegressor(
|
| 391 |
-
n_estimators=
|
| 392 |
-
max_depth=
|
| 393 |
-
learning_rate=0.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 394 |
random_state=42,
|
| 395 |
n_jobs=-1,
|
| 396 |
-
verbose=-1
|
| 397 |
)
|
| 398 |
-
lgb_model.fit(X_train, y_train)
|
| 399 |
models.append(lgb_model)
|
| 400 |
|
| 401 |
rf_model = RandomForestRegressor(
|
| 402 |
-
n_estimators=
|
| 403 |
-
max_depth=
|
|
|
|
|
|
|
|
|
|
| 404 |
random_state=42,
|
| 405 |
-
n_jobs=-1
|
| 406 |
)
|
| 407 |
-
rf_model.fit(X_train, y_train)
|
| 408 |
models.append(rf_model)
|
| 409 |
|
| 410 |
# Ensemble tahmin: ortalama
|
|
@@ -433,15 +475,18 @@ def predict_stock_for_api(symbol, days_ahead=5, model_type='ensemble', market_id
|
|
| 433 |
|
| 434 |
confidence_pct = _compute_confidence(r2, direction_correct)
|
| 435 |
|
| 436 |
-
# Son tahmin (latest bar)
|
| 437 |
last_features_raw = np.nan_to_num(X_all[-1].reshape(1, -1), nan=0.0, posinf=0.0, neginf=0.0)
|
| 438 |
-
last_features = scaler_X.transform(last_features_raw)
|
| 439 |
predictions_final = np.array([
|
| 440 |
float(np.asarray(m.predict(last_features), dtype=float).ravel()[0])
|
| 441 |
for m in models
|
| 442 |
], dtype=float)
|
| 443 |
predicted_change = float(predictions_final.mean()) # already % return
|
| 444 |
|
|
|
|
|
|
|
|
|
|
| 445 |
# Bayesian shrinkage: dampen extreme predictions when confidence is low
|
| 446 |
predicted_change = _apply_shrinkage(predicted_change, confidence_pct, days_ahead)
|
| 447 |
predicted_price = current_price * (1 + predicted_change / 100)
|
|
@@ -469,18 +514,23 @@ def predict_stock_for_api(symbol, days_ahead=5, model_type='ensemble', market_id
|
|
| 469 |
return res
|
| 470 |
else:
|
| 471 |
# Fallback to GradientBoosting if no advanced models
|
| 472 |
-
model = GradientBoostingRegressor(
|
|
|
|
|
|
|
|
|
|
| 473 |
else:
|
| 474 |
# Default: GradientBoosting
|
| 475 |
model = GradientBoostingRegressor(
|
| 476 |
-
n_estimators=
|
| 477 |
-
max_depth=
|
| 478 |
-
learning_rate=0.
|
| 479 |
-
|
|
|
|
|
|
|
| 480 |
)
|
| 481 |
|
| 482 |
# Tek model durumunda eğitim
|
| 483 |
-
model.fit(X_train, y_train)
|
| 484 |
|
| 485 |
# Test seti üzerinde performans (target is % return, no inverse transform)
|
| 486 |
y_pred_test = np.asarray(model.predict(X_test), dtype=float)
|
|
@@ -505,11 +555,14 @@ def predict_stock_for_api(symbol, days_ahead=5, model_type='ensemble', market_id
|
|
| 505 |
# Convert to a UI-friendly 0-100 confidence score
|
| 506 |
confidence_pct = _compute_confidence(r2, direction_correct)
|
| 507 |
|
| 508 |
-
# Tahmin yap (latest bar)
|
| 509 |
last_features_raw = np.nan_to_num(X_all[-1].reshape(1, -1), nan=0.0, posinf=0.0, neginf=0.0)
|
| 510 |
-
last_features = scaler_X.transform(last_features_raw)
|
| 511 |
predicted_change = float(np.asarray(model.predict(last_features), dtype=float).ravel()[0]) # already % return
|
| 512 |
|
|
|
|
|
|
|
|
|
|
| 513 |
# Bayesian shrinkage: dampen extreme predictions when confidence is low
|
| 514 |
predicted_change = _apply_shrinkage(predicted_change, confidence_pct, days_ahead)
|
| 515 |
predicted_price = current_price * (1 + predicted_change / 100)
|
|
|
|
| 79 |
r2_c = float(r2) if np.isfinite(r2) else 0.0
|
| 80 |
dir_c = float(direction_correct) if np.isfinite(direction_correct) else 0.5
|
| 81 |
|
| 82 |
+
# Hard floor: if model is no better than random, confidence = 0
|
| 83 |
+
if r2_c <= 0 and dir_c <= 0.52:
|
| 84 |
+
return 0.0
|
| 85 |
+
|
| 86 |
+
# ── R² component (40% weight) ──
|
| 87 |
+
# Financial R²: 0.0→0, 0.05→0.40, 0.15→0.80, 0.25→1.0
|
| 88 |
+
if r2_c > 0:
|
| 89 |
r2_norm = min(1.0, r2_c / 0.25)
|
| 90 |
else:
|
| 91 |
+
r2_norm = 0.0
|
|
|
|
|
|
|
| 92 |
|
| 93 |
+
# ── Direction component (60% weight) ──
|
| 94 |
+
# ≤52%→0, 55%→0.30, 60%→0.67, 65%→1.0
|
| 95 |
+
if dir_c <= 0.52:
|
| 96 |
+
dir_norm = 0.0
|
| 97 |
+
elif dir_c >= 0.65:
|
| 98 |
dir_norm = 1.0
|
|
|
|
|
|
|
| 99 |
else:
|
| 100 |
+
dir_norm = (dir_c - 0.52) / (0.65 - 0.52)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 101 |
|
| 102 |
+
raw = r2_norm * 0.40 + dir_norm * 0.60
|
| 103 |
|
| 104 |
+
return max(0.0, min(95.0, raw * 100.0))
|
| 105 |
except Exception:
|
| 106 |
+
return 0.0
|
| 107 |
|
| 108 |
|
| 109 |
def _apply_shrinkage(predicted_change_pct: float, confidence_pct: float, days_ahead: int) -> float:
|
|
|
|
| 326 |
# Current price is the most recent close; prediction is for +days_ahead
|
| 327 |
current_price = float(df['Close'].iloc[-1])
|
| 328 |
|
| 329 |
+
# ── Target clipping: cap extreme returns to prevent outlier-driven training ──
|
| 330 |
+
_clip_limit = 3.5 * float(np.sqrt(max(1, days_ahead))) # ~9% for 7 days
|
| 331 |
+
y = np.clip(y, -_clip_limit, _clip_limit)
|
| 332 |
+
|
| 333 |
# Final safety: ensure finite values for sklearn
|
| 334 |
X = np.nan_to_num(X, nan=0.0, posinf=0.0, neginf=0.0)
|
| 335 |
y = np.nan_to_num(y, nan=0.0, posinf=0.0, neginf=0.0)
|
|
|
|
| 345 |
val_start = split_idx # fallback if not enough data for purge
|
| 346 |
X_train, X_test = X_scaled[:split_idx], X_scaled[val_start:]
|
| 347 |
y_train, y_test = y[:split_idx], y[val_start:]
|
| 348 |
+
|
| 349 |
+
# ── Sample weighting: exponential recency (recent data 3x more important) ──
|
| 350 |
+
_n_train = len(X_train)
|
| 351 |
+
_sample_weights = np.exp(np.linspace(-1.0, 0.0, _n_train))
|
| 352 |
+
|
| 353 |
+
# ── Feature importance selection: reduce overfitting from high dimensionality ──
|
| 354 |
+
_n_keep = min(10, X_train.shape[1])
|
| 355 |
+
_selector_rf = RandomForestRegressor(
|
| 356 |
+
n_estimators=50, max_depth=4, min_samples_leaf=5,
|
| 357 |
+
max_features='sqrt', random_state=42, n_jobs=-1,
|
| 358 |
+
)
|
| 359 |
+
_selector_rf.fit(X_train, y_train, sample_weight=_sample_weights)
|
| 360 |
+
_importances = _selector_rf.feature_importances_
|
| 361 |
+
_top_feat_idx = np.argsort(_importances)[-_n_keep:]
|
| 362 |
+
X_train = X_train[:, _top_feat_idx]
|
| 363 |
+
X_test = X_test[:, _top_feat_idx]
|
| 364 |
+
|
| 365 |
# Model seç ve eğit
|
| 366 |
if model_type == 'xgboost' and XGBOOST_AVAILABLE:
|
| 367 |
model = xgb.XGBRegressor(
|
| 368 |
+
n_estimators=200,
|
| 369 |
+
max_depth=3,
|
| 370 |
+
learning_rate=0.03,
|
| 371 |
+
subsample=0.8,
|
| 372 |
+
colsample_bytree=0.7,
|
| 373 |
+
reg_alpha=0.1,
|
| 374 |
+
reg_lambda=1.0,
|
| 375 |
+
min_child_weight=5,
|
| 376 |
random_state=42,
|
| 377 |
+
n_jobs=-1,
|
| 378 |
)
|
| 379 |
elif model_type == 'lightgbm' and LIGHTGBM_AVAILABLE:
|
| 380 |
model = lgb.LGBMRegressor(
|
| 381 |
+
n_estimators=200,
|
| 382 |
+
max_depth=3,
|
| 383 |
+
learning_rate=0.03,
|
| 384 |
+
subsample=0.8,
|
| 385 |
+
colsample_bytree=0.7,
|
| 386 |
+
reg_alpha=0.1,
|
| 387 |
+
reg_lambda=1.0,
|
| 388 |
+
min_child_samples=10,
|
| 389 |
random_state=42,
|
| 390 |
n_jobs=-1,
|
| 391 |
+
verbose=-1,
|
| 392 |
)
|
| 393 |
elif model_type == 'rf':
|
| 394 |
model = RandomForestRegressor(
|
| 395 |
+
n_estimators=200,
|
| 396 |
+
max_depth=3,
|
| 397 |
+
min_samples_split=10,
|
| 398 |
+
min_samples_leaf=5,
|
| 399 |
+
max_features='sqrt',
|
| 400 |
random_state=42,
|
| 401 |
+
n_jobs=-1,
|
| 402 |
)
|
| 403 |
elif model_type == 'ensemble':
|
| 404 |
+
# Ensemble: XGBoost + LightGBM + RandomForest (regularized)
|
| 405 |
models = []
|
| 406 |
|
| 407 |
if XGBOOST_AVAILABLE:
|
| 408 |
xgb_model = xgb.XGBRegressor(
|
| 409 |
+
n_estimators=200,
|
| 410 |
+
max_depth=3,
|
| 411 |
+
learning_rate=0.03,
|
| 412 |
+
subsample=0.8,
|
| 413 |
+
colsample_bytree=0.7,
|
| 414 |
+
reg_alpha=0.1,
|
| 415 |
+
reg_lambda=1.0,
|
| 416 |
+
min_child_weight=5,
|
| 417 |
random_state=42,
|
| 418 |
+
n_jobs=-1,
|
| 419 |
)
|
| 420 |
+
xgb_model.fit(X_train, y_train, sample_weight=_sample_weights)
|
| 421 |
models.append(xgb_model)
|
| 422 |
|
| 423 |
if LIGHTGBM_AVAILABLE:
|
| 424 |
lgb_model = lgb.LGBMRegressor(
|
| 425 |
+
n_estimators=200,
|
| 426 |
+
max_depth=3,
|
| 427 |
+
learning_rate=0.03,
|
| 428 |
+
subsample=0.8,
|
| 429 |
+
colsample_bytree=0.7,
|
| 430 |
+
reg_alpha=0.1,
|
| 431 |
+
reg_lambda=1.0,
|
| 432 |
+
min_child_samples=10,
|
| 433 |
random_state=42,
|
| 434 |
n_jobs=-1,
|
| 435 |
+
verbose=-1,
|
| 436 |
)
|
| 437 |
+
lgb_model.fit(X_train, y_train, sample_weight=_sample_weights)
|
| 438 |
models.append(lgb_model)
|
| 439 |
|
| 440 |
rf_model = RandomForestRegressor(
|
| 441 |
+
n_estimators=200,
|
| 442 |
+
max_depth=3,
|
| 443 |
+
min_samples_split=10,
|
| 444 |
+
min_samples_leaf=5,
|
| 445 |
+
max_features='sqrt',
|
| 446 |
random_state=42,
|
| 447 |
+
n_jobs=-1,
|
| 448 |
)
|
| 449 |
+
rf_model.fit(X_train, y_train, sample_weight=_sample_weights)
|
| 450 |
models.append(rf_model)
|
| 451 |
|
| 452 |
# Ensemble tahmin: ortalama
|
|
|
|
| 475 |
|
| 476 |
confidence_pct = _compute_confidence(r2, direction_correct)
|
| 477 |
|
| 478 |
+
# Son tahmin (latest bar — apply same feature selection)
|
| 479 |
last_features_raw = np.nan_to_num(X_all[-1].reshape(1, -1), nan=0.0, posinf=0.0, neginf=0.0)
|
| 480 |
+
last_features = scaler_X.transform(last_features_raw)[:, _top_feat_idx]
|
| 481 |
predictions_final = np.array([
|
| 482 |
float(np.asarray(m.predict(last_features), dtype=float).ravel()[0])
|
| 483 |
for m in models
|
| 484 |
], dtype=float)
|
| 485 |
predicted_change = float(predictions_final.mean()) # already % return
|
| 486 |
|
| 487 |
+
# Base shrinkage: models overfit, shrink 70% toward zero
|
| 488 |
+
predicted_change *= 0.30
|
| 489 |
+
|
| 490 |
# Bayesian shrinkage: dampen extreme predictions when confidence is low
|
| 491 |
predicted_change = _apply_shrinkage(predicted_change, confidence_pct, days_ahead)
|
| 492 |
predicted_price = current_price * (1 + predicted_change / 100)
|
|
|
|
| 514 |
return res
|
| 515 |
else:
|
| 516 |
# Fallback to GradientBoosting if no advanced models
|
| 517 |
+
model = GradientBoostingRegressor(
|
| 518 |
+
n_estimators=200, max_depth=3, learning_rate=0.03,
|
| 519 |
+
subsample=0.8, min_samples_split=10, random_state=42,
|
| 520 |
+
)
|
| 521 |
else:
|
| 522 |
# Default: GradientBoosting
|
| 523 |
model = GradientBoostingRegressor(
|
| 524 |
+
n_estimators=200,
|
| 525 |
+
max_depth=3,
|
| 526 |
+
learning_rate=0.03,
|
| 527 |
+
subsample=0.8,
|
| 528 |
+
min_samples_split=10,
|
| 529 |
+
random_state=42,
|
| 530 |
)
|
| 531 |
|
| 532 |
# Tek model durumunda eğitim
|
| 533 |
+
model.fit(X_train, y_train, sample_weight=_sample_weights)
|
| 534 |
|
| 535 |
# Test seti üzerinde performans (target is % return, no inverse transform)
|
| 536 |
y_pred_test = np.asarray(model.predict(X_test), dtype=float)
|
|
|
|
| 555 |
# Convert to a UI-friendly 0-100 confidence score
|
| 556 |
confidence_pct = _compute_confidence(r2, direction_correct)
|
| 557 |
|
| 558 |
+
# Tahmin yap (latest bar — apply same feature selection)
|
| 559 |
last_features_raw = np.nan_to_num(X_all[-1].reshape(1, -1), nan=0.0, posinf=0.0, neginf=0.0)
|
| 560 |
+
last_features = scaler_X.transform(last_features_raw)[:, _top_feat_idx]
|
| 561 |
predicted_change = float(np.asarray(model.predict(last_features), dtype=float).ravel()[0]) # already % return
|
| 562 |
|
| 563 |
+
# Base shrinkage: models overfit, shrink 70% toward zero
|
| 564 |
+
predicted_change *= 0.30
|
| 565 |
+
|
| 566 |
# Bayesian shrinkage: dampen extreme predictions when confidence is low
|
| 567 |
predicted_change = _apply_shrinkage(predicted_change, confidence_pct, days_ahead)
|
| 568 |
predicted_price = current_price * (1 + predicted_change / 100)
|
analysis/walk_forward_backtest.py
CHANGED
|
@@ -297,6 +297,13 @@ def walk_forward_backtest(
|
|
| 297 |
df_feat = add_macro_features(df_feat)
|
| 298 |
df_feat["target_return"] = (df_feat["Close"].shift(-days_ahead) / df_feat["Close"] - 1) * 100.0
|
| 299 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 300 |
# Corporate action filter: poison target_return around suspected artifact days
|
| 301 |
# so the model never trains on contaminated bedelsiz/bedelli/temettu data.
|
| 302 |
_ca_suspect = flag_corp_action_days(df)
|
|
@@ -395,10 +402,13 @@ def walk_forward_backtest(
|
|
| 395 |
X_test_s = scaler.transform(np.nan_to_num(X_test, nan=0.0, posinf=0.0, neginf=0.0))
|
| 396 |
|
| 397 |
# --- Feature importance selection: train quick RF, keep top features ---
|
| 398 |
-
_sel_rf = RandomForestRegressor(
|
|
|
|
|
|
|
|
|
|
| 399 |
_sel_rf.fit(X_train_s, y_train)
|
| 400 |
importances = _sel_rf.feature_importances_
|
| 401 |
-
n_keep = min(
|
| 402 |
top_idx = np.argsort(importances)[-n_keep:]
|
| 403 |
X_train_s = X_train_s[:, top_idx]
|
| 404 |
X_test_s = X_test_s[:, top_idx]
|
|
@@ -413,10 +423,14 @@ def walk_forward_backtest(
|
|
| 413 |
|
| 414 |
# --- Ensemble of classifiers ---
|
| 415 |
clf_rf = RandomForestClassifier(
|
| 416 |
-
n_estimators=200, max_depth=
|
|
|
|
|
|
|
| 417 |
)
|
| 418 |
clf_gb = GradientBoostingClassifier(
|
| 419 |
-
n_estimators=
|
|
|
|
|
|
|
| 420 |
)
|
| 421 |
clf_rf.fit(X_train_s, y_train_cls, sample_weight=sample_weights)
|
| 422 |
clf_gb.fit(X_train_s, y_train_cls, sample_weight=sample_weights)
|
|
@@ -435,9 +449,17 @@ def walk_forward_backtest(
|
|
| 435 |
# Also train regression model for magnitude estimate
|
| 436 |
reg_model: Any
|
| 437 |
if str(model_type).lower() == "rf":
|
| 438 |
-
reg_model = RandomForestRegressor(
|
|
|
|
|
|
|
|
|
|
|
|
|
| 439 |
else:
|
| 440 |
-
reg_model = GradientBoostingRegressor(
|
|
|
|
|
|
|
|
|
|
|
|
|
| 441 |
reg_model.fit(X_train_s, y_train, sample_weight=sample_weights)
|
| 442 |
y_pred_reg = np.asarray(reg_model.predict(X_test_s), dtype=float)
|
| 443 |
r2 = float(r2_score(y_test, y_pred_reg))
|
|
@@ -457,6 +479,7 @@ def walk_forward_backtest(
|
|
| 457 |
X_pred_row = scaler.transform(np.nan_to_num(row_t[FEATURES].to_numpy(dtype=float).reshape(1, -1), nan=0.0, posinf=0.0, neginf=0.0))
|
| 458 |
X_pred_sel = X_pred_row[:, top_idx]
|
| 459 |
reg_pred = float(np.asarray(reg_model.predict(X_pred_sel), dtype=float).ravel()[0])
|
|
|
|
| 460 |
predicted_change = float(_apply_shrinkage(reg_pred, confidence_pct, days_ahead))
|
| 461 |
ml_signal = "HOLD" # Model not confident enough
|
| 462 |
_current_prob_up = 0.5
|
|
@@ -472,6 +495,7 @@ def walk_forward_backtest(
|
|
| 472 |
|
| 473 |
# Regression for magnitude
|
| 474 |
reg_pred = float(np.asarray(reg_model.predict(X_pred_sel), dtype=float).ravel()[0])
|
|
|
|
| 475 |
predicted_change = float(_apply_shrinkage(reg_pred, confidence_pct, days_ahead))
|
| 476 |
|
| 477 |
# Signal from classification probability — RAISED thresholds for higher conviction
|
|
|
|
| 297 |
df_feat = add_macro_features(df_feat)
|
| 298 |
df_feat["target_return"] = (df_feat["Close"].shift(-days_ahead) / df_feat["Close"] - 1) * 100.0
|
| 299 |
|
| 300 |
+
# Target clipping: cap extreme returns to prevent outlier-driven training
|
| 301 |
+
_target_clip = 3.5 * float(np.sqrt(max(1, days_ahead))) # ~9% for 7 days
|
| 302 |
+
_extreme_mask = df_feat["target_return"].abs() > _target_clip
|
| 303 |
+
df_feat.loc[_extreme_mask, "target_return"] = np.clip(
|
| 304 |
+
df_feat.loc[_extreme_mask, "target_return"], -_target_clip, _target_clip,
|
| 305 |
+
)
|
| 306 |
+
|
| 307 |
# Corporate action filter: poison target_return around suspected artifact days
|
| 308 |
# so the model never trains on contaminated bedelsiz/bedelli/temettu data.
|
| 309 |
_ca_suspect = flag_corp_action_days(df)
|
|
|
|
| 402 |
X_test_s = scaler.transform(np.nan_to_num(X_test, nan=0.0, posinf=0.0, neginf=0.0))
|
| 403 |
|
| 404 |
# --- Feature importance selection: train quick RF, keep top features ---
|
| 405 |
+
_sel_rf = RandomForestRegressor(
|
| 406 |
+
n_estimators=50, max_depth=4, min_samples_leaf=5,
|
| 407 |
+
max_features='sqrt', random_state=42, n_jobs=-1,
|
| 408 |
+
)
|
| 409 |
_sel_rf.fit(X_train_s, y_train)
|
| 410 |
importances = _sel_rf.feature_importances_
|
| 411 |
+
n_keep = min(10, len(FEATURES))
|
| 412 |
top_idx = np.argsort(importances)[-n_keep:]
|
| 413 |
X_train_s = X_train_s[:, top_idx]
|
| 414 |
X_test_s = X_test_s[:, top_idx]
|
|
|
|
| 423 |
|
| 424 |
# --- Ensemble of classifiers ---
|
| 425 |
clf_rf = RandomForestClassifier(
|
| 426 |
+
n_estimators=200, max_depth=3, min_samples_split=10,
|
| 427 |
+
min_samples_leaf=5, max_features='sqrt',
|
| 428 |
+
random_state=42, n_jobs=-1, class_weight="balanced",
|
| 429 |
)
|
| 430 |
clf_gb = GradientBoostingClassifier(
|
| 431 |
+
n_estimators=200, max_depth=3, learning_rate=0.03,
|
| 432 |
+
subsample=0.8, min_samples_split=10,
|
| 433 |
+
random_state=42,
|
| 434 |
)
|
| 435 |
clf_rf.fit(X_train_s, y_train_cls, sample_weight=sample_weights)
|
| 436 |
clf_gb.fit(X_train_s, y_train_cls, sample_weight=sample_weights)
|
|
|
|
| 449 |
# Also train regression model for magnitude estimate
|
| 450 |
reg_model: Any
|
| 451 |
if str(model_type).lower() == "rf":
|
| 452 |
+
reg_model = RandomForestRegressor(
|
| 453 |
+
n_estimators=200, max_depth=3, min_samples_split=10,
|
| 454 |
+
min_samples_leaf=5, max_features='sqrt',
|
| 455 |
+
random_state=42, n_jobs=-1,
|
| 456 |
+
)
|
| 457 |
else:
|
| 458 |
+
reg_model = GradientBoostingRegressor(
|
| 459 |
+
n_estimators=200, max_depth=3, learning_rate=0.03,
|
| 460 |
+
subsample=0.8, min_samples_split=10,
|
| 461 |
+
random_state=42,
|
| 462 |
+
)
|
| 463 |
reg_model.fit(X_train_s, y_train, sample_weight=sample_weights)
|
| 464 |
y_pred_reg = np.asarray(reg_model.predict(X_test_s), dtype=float)
|
| 465 |
r2 = float(r2_score(y_test, y_pred_reg))
|
|
|
|
| 479 |
X_pred_row = scaler.transform(np.nan_to_num(row_t[FEATURES].to_numpy(dtype=float).reshape(1, -1), nan=0.0, posinf=0.0, neginf=0.0))
|
| 480 |
X_pred_sel = X_pred_row[:, top_idx]
|
| 481 |
reg_pred = float(np.asarray(reg_model.predict(X_pred_sel), dtype=float).ravel()[0])
|
| 482 |
+
reg_pred *= 0.30 # Base shrinkage: 70% toward zero
|
| 483 |
predicted_change = float(_apply_shrinkage(reg_pred, confidence_pct, days_ahead))
|
| 484 |
ml_signal = "HOLD" # Model not confident enough
|
| 485 |
_current_prob_up = 0.5
|
|
|
|
| 495 |
|
| 496 |
# Regression for magnitude
|
| 497 |
reg_pred = float(np.asarray(reg_model.predict(X_pred_sel), dtype=float).ravel()[0])
|
| 498 |
+
reg_pred *= 0.30 # Base shrinkage: 70% toward zero
|
| 499 |
predicted_change = float(_apply_shrinkage(reg_pred, confidence_pct, days_ahead))
|
| 500 |
|
| 501 |
# Signal from classification probability — RAISED thresholds for higher conviction
|
com.borsa.scan-bist.plist
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 2 |
+
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
| 3 |
+
<plist version="1.0">
|
| 4 |
+
<dict>
|
| 5 |
+
<key>Label</key>
|
| 6 |
+
<string>com.borsa.scan-bist</string>
|
| 7 |
+
|
| 8 |
+
<key>ProgramArguments</key>
|
| 9 |
+
<array>
|
| 10 |
+
<string>/Users/onerozbey/borsa_uygulamasi_runtime/huggingface-space/.venv/bin/python</string>
|
| 11 |
+
<string>run_bist100_scan.py</string>
|
| 12 |
+
<string>--force</string>
|
| 13 |
+
</array>
|
| 14 |
+
|
| 15 |
+
<key>WorkingDirectory</key>
|
| 16 |
+
<string>/Users/onerozbey/borsa_uygulamasi_runtime/huggingface-space</string>
|
| 17 |
+
|
| 18 |
+
<key>EnvironmentVariables</key>
|
| 19 |
+
<dict>
|
| 20 |
+
<key>PYTHONPATH</key>
|
| 21 |
+
<string>/Users/onerozbey/borsa_uygulamasi_runtime/huggingface-space</string>
|
| 22 |
+
<key>PYTHONUNBUFFERED</key>
|
| 23 |
+
<string>1</string>
|
| 24 |
+
</dict>
|
| 25 |
+
|
| 26 |
+
<!-- Run every Sunday at 18:00 local time -->
|
| 27 |
+
<key>StartCalendarInterval</key>
|
| 28 |
+
<dict>
|
| 29 |
+
<key>Weekday</key>
|
| 30 |
+
<integer>0</integer>
|
| 31 |
+
<key>Hour</key>
|
| 32 |
+
<integer>18</integer>
|
| 33 |
+
<key>Minute</key>
|
| 34 |
+
<integer>0</integer>
|
| 35 |
+
</dict>
|
| 36 |
+
|
| 37 |
+
<key>StandardOutPath</key>
|
| 38 |
+
<string>/Users/onerozbey/Library/Logs/borsa-trading-worker/scan_bist_stdout.log</string>
|
| 39 |
+
|
| 40 |
+
<key>StandardErrorPath</key>
|
| 41 |
+
<string>/Users/onerozbey/Library/Logs/borsa-trading-worker/scan_bist_stderr.log</string>
|
| 42 |
+
|
| 43 |
+
<key>Nice</key>
|
| 44 |
+
<integer>10</integer>
|
| 45 |
+
</dict>
|
| 46 |
+
</plist>
|
com.borsa.scan-us.plist
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 2 |
+
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
| 3 |
+
<plist version="1.0">
|
| 4 |
+
<dict>
|
| 5 |
+
<key>Label</key>
|
| 6 |
+
<string>com.borsa.scan-us</string>
|
| 7 |
+
|
| 8 |
+
<key>ProgramArguments</key>
|
| 9 |
+
<array>
|
| 10 |
+
<string>/Users/onerozbey/borsa_uygulamasi_runtime/huggingface-space/.venv/bin/python</string>
|
| 11 |
+
<string>run_us_scan.py</string>
|
| 12 |
+
<string>--force</string>
|
| 13 |
+
</array>
|
| 14 |
+
|
| 15 |
+
<key>WorkingDirectory</key>
|
| 16 |
+
<string>/Users/onerozbey/borsa_uygulamasi_runtime/huggingface-space</string>
|
| 17 |
+
|
| 18 |
+
<key>EnvironmentVariables</key>
|
| 19 |
+
<dict>
|
| 20 |
+
<key>PYTHONPATH</key>
|
| 21 |
+
<string>/Users/onerozbey/borsa_uygulamasi_runtime/huggingface-space</string>
|
| 22 |
+
<key>PYTHONUNBUFFERED</key>
|
| 23 |
+
<string>1</string>
|
| 24 |
+
</dict>
|
| 25 |
+
|
| 26 |
+
<!-- Run every Sunday at 23:00 local time (after BIST scan finishes) -->
|
| 27 |
+
<key>StartCalendarInterval</key>
|
| 28 |
+
<dict>
|
| 29 |
+
<key>Weekday</key>
|
| 30 |
+
<integer>0</integer>
|
| 31 |
+
<key>Hour</key>
|
| 32 |
+
<integer>23</integer>
|
| 33 |
+
<key>Minute</key>
|
| 34 |
+
<integer>0</integer>
|
| 35 |
+
</dict>
|
| 36 |
+
|
| 37 |
+
<key>StandardOutPath</key>
|
| 38 |
+
<string>/Users/onerozbey/Library/Logs/borsa-trading-worker/scan_us_stdout.log</string>
|
| 39 |
+
|
| 40 |
+
<key>StandardErrorPath</key>
|
| 41 |
+
<string>/Users/onerozbey/Library/Logs/borsa-trading-worker/scan_us_stderr.log</string>
|
| 42 |
+
|
| 43 |
+
<key>Nice</key>
|
| 44 |
+
<integer>10</integer>
|
| 45 |
+
</dict>
|
| 46 |
+
</plist>
|
com.borsa.trading-worker-us.plist
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 2 |
+
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN"
|
| 3 |
+
"http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
| 4 |
+
<plist version="1.0">
|
| 5 |
+
<dict>
|
| 6 |
+
<key>Label</key>
|
| 7 |
+
<string>com.borsa.trading-worker-us</string>
|
| 8 |
+
|
| 9 |
+
<!--
|
| 10 |
+
US market worker LaunchAgent.
|
| 11 |
+
Runs the worker daemon with --market us for NYSE/NASDAQ/SP100.
|
| 12 |
+
The worker enforces US market hours (16:30 TR / 13:30 UTC open).
|
| 13 |
+
|
| 14 |
+
Use install_launchagent.sh (or the install steps below) to load:
|
| 15 |
+
cp com.borsa.trading-worker-us.plist ~/Library/LaunchAgents/
|
| 16 |
+
launchctl load ~/Library/LaunchAgents/com.borsa.trading-worker-us.plist
|
| 17 |
+
-->
|
| 18 |
+
|
| 19 |
+
<key>ProgramArguments</key>
|
| 20 |
+
<array>
|
| 21 |
+
<string>/usr/bin/env</string>
|
| 22 |
+
<string>python3</string>
|
| 23 |
+
<string>-m</string>
|
| 24 |
+
<string>trading.worker</string>
|
| 25 |
+
<string>--daemon</string>
|
| 26 |
+
<string>--mode</string>
|
| 27 |
+
<string>paper</string>
|
| 28 |
+
<string>--interval</string>
|
| 29 |
+
<string>60</string>
|
| 30 |
+
<string>--market</string>
|
| 31 |
+
<string>us</string>
|
| 32 |
+
</array>
|
| 33 |
+
|
| 34 |
+
<key>WorkingDirectory</key>
|
| 35 |
+
<string>/PATH/TO/borsa_uygulamasi</string>
|
| 36 |
+
|
| 37 |
+
<key>RunAtLoad</key>
|
| 38 |
+
<true/>
|
| 39 |
+
|
| 40 |
+
<key>StandardOutPath</key>
|
| 41 |
+
<string>/PATH/TO/borsa_uygulamasi/paper_trading/markets/us/worker_stdout.log</string>
|
| 42 |
+
|
| 43 |
+
<key>StandardErrorPath</key>
|
| 44 |
+
<string>/PATH/TO/borsa_uygulamasi/paper_trading/markets/us/worker_stderr.log</string>
|
| 45 |
+
|
| 46 |
+
<key>EnvironmentVariables</key>
|
| 47 |
+
<dict>
|
| 48 |
+
<key>PYTHONPATH</key>
|
| 49 |
+
<string>/PATH/TO/borsa_uygulamasi</string>
|
| 50 |
+
<key>PYTHONUNBUFFERED</key>
|
| 51 |
+
<string>1</string>
|
| 52 |
+
</dict>
|
| 53 |
+
|
| 54 |
+
<key>KeepAlive</key>
|
| 55 |
+
<true/>
|
| 56 |
+
</dict>
|
| 57 |
+
</plist>
|
huggingface-space/_test_us_audit.py
ADDED
|
@@ -0,0 +1,595 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
"""Ruthless US market audit script."""
|
| 3 |
+
import sys, json, os, sqlite3, traceback
|
| 4 |
+
from pathlib import Path
|
| 5 |
+
from datetime import datetime, timezone
|
| 6 |
+
|
| 7 |
+
sys.path.insert(0, str(Path(__file__).parent))
|
| 8 |
+
|
| 9 |
+
PASS = "✅ PASS"
|
| 10 |
+
FAIL = "❌ FAIL"
|
| 11 |
+
WARN = "⚠️ WARN"
|
| 12 |
+
results = []
|
| 13 |
+
|
| 14 |
+
def check(name, expr_fn):
|
| 15 |
+
try:
|
| 16 |
+
msg = expr_fn()
|
| 17 |
+
if msg is None:
|
| 18 |
+
results.append((PASS, name, "OK"))
|
| 19 |
+
return True
|
| 20 |
+
elif isinstance(msg, str) and msg.startswith("WARN:"):
|
| 21 |
+
results.append((WARN, name, msg[5:].strip()))
|
| 22 |
+
return True
|
| 23 |
+
else:
|
| 24 |
+
results.append((PASS, name, str(msg)))
|
| 25 |
+
return True
|
| 26 |
+
except AssertionError as e:
|
| 27 |
+
results.append((FAIL, name, str(e)))
|
| 28 |
+
return False
|
| 29 |
+
except Exception as e:
|
| 30 |
+
results.append((FAIL, name, f"{type(e).__name__}: {e}"))
|
| 31 |
+
return False
|
| 32 |
+
|
| 33 |
+
# ─────────────────────────────────────────────
|
| 34 |
+
# SECTION 1: MarketConfig
|
| 35 |
+
# ─────────────────────────────────────────────
|
| 36 |
+
print("\n" + "="*60)
|
| 37 |
+
print("SECTION 1: MarketConfig")
|
| 38 |
+
print("="*60)
|
| 39 |
+
|
| 40 |
+
def test_market_config():
|
| 41 |
+
from trading.market_registry import get_market_config
|
| 42 |
+
cfg = get_market_config("us")
|
| 43 |
+
assert cfg.market_id == "us", f"market_id={cfg.market_id}"
|
| 44 |
+
assert cfg.currency == "USD", f"currency={cfg.currency}"
|
| 45 |
+
assert cfg.timezone == "America/New_York", f"tz={cfg.timezone}"
|
| 46 |
+
assert cfg.run_start_hour == 9, f"run_start_hour={cfg.run_start_hour}"
|
| 47 |
+
assert cfg.run_end_hour == 16, f"run_end_hour={cfg.run_end_hour}"
|
| 48 |
+
assert cfg.scan_start_hour == 17, f"scan_start_hour={cfg.scan_start_hour}"
|
| 49 |
+
assert cfg.scan_start_minute == 0
|
| 50 |
+
return f"USD, 09:30-16:00 ET, scan@17:00"
|
| 51 |
+
|
| 52 |
+
check("1.1 MarketConfig basic fields", test_market_config)
|
| 53 |
+
|
| 54 |
+
def test_scan_path():
|
| 55 |
+
from trading.market_registry import get_scan_results_path
|
| 56 |
+
p = get_scan_results_path("us", completed=True)
|
| 57 |
+
assert "markets/us" in str(p), f"wrong path: {p}"
|
| 58 |
+
assert p.name == "scan_results.json", f"wrong filename: {p.name}"
|
| 59 |
+
return f"path={p}, exists={p.exists()}"
|
| 60 |
+
|
| 61 |
+
check("1.2 get_scan_results_path for US", test_scan_path)
|
| 62 |
+
|
| 63 |
+
def test_db_path():
|
| 64 |
+
from trading.market_registry import get_trading_db_path
|
| 65 |
+
p = get_trading_db_path("us")
|
| 66 |
+
assert "markets/us" in str(p), f"wrong path: {p}"
|
| 67 |
+
assert p.name == "trading.db"
|
| 68 |
+
return f"path={p}, exists={p.exists()}"
|
| 69 |
+
|
| 70 |
+
check("1.3 get_trading_db_path for US", test_db_path)
|
| 71 |
+
|
| 72 |
+
def test_market_open():
|
| 73 |
+
from trading.market_registry import is_market_open_window
|
| 74 |
+
from datetime import timezone
|
| 75 |
+
now = datetime.now(timezone.utc)
|
| 76 |
+
result = is_market_open_window("us", now_utc=now)
|
| 77 |
+
# Just verify it returns bool without crashing
|
| 78 |
+
assert isinstance(result, bool)
|
| 79 |
+
return f"is_open={result} (UTC {now.strftime('%H:%M')})"
|
| 80 |
+
|
| 81 |
+
check("1.4 is_market_open_window US", test_market_open)
|
| 82 |
+
|
| 83 |
+
# ─────────────────────────────────────────────
|
| 84 |
+
# SECTION 2: Universe Fetcher
|
| 85 |
+
# ─────────────────────────────────────────────
|
| 86 |
+
print("\n" + "="*60)
|
| 87 |
+
print("SECTION 2: US Universe Fetcher")
|
| 88 |
+
print("="*60)
|
| 89 |
+
|
| 90 |
+
def test_universe_fallback():
|
| 91 |
+
# run_us_scan.py is right here
|
| 92 |
+
from run_us_scan import _SP100_FALLBACK, get_universe
|
| 93 |
+
assert len(_SP100_FALLBACK) == 100, f"SP100 fallback has {len(_SP100_FALLBACK)} symbols (expected 100)"
|
| 94 |
+
# Check no duplicates
|
| 95 |
+
assert len(set(_SP100_FALLBACK)) == len(_SP100_FALLBACK), "Duplicates in fallback list"
|
| 96 |
+
return f"fallback has {len(_SP100_FALLBACK)} unique symbols"
|
| 97 |
+
|
| 98 |
+
check("2.1 SP100 fallback list has 100 unique symbols", test_universe_fallback)
|
| 99 |
+
|
| 100 |
+
def test_get_universe_returns_symbols():
|
| 101 |
+
from run_us_scan import get_universe
|
| 102 |
+
# Use fallback path (will try Wikipedia first but fallback if needed)
|
| 103 |
+
import run_us_scan as m
|
| 104 |
+
orig = m._fetch_universe_from_wikipedia
|
| 105 |
+
m._fetch_universe_from_wikipedia = lambda name: [] # force fallback
|
| 106 |
+
try:
|
| 107 |
+
syms = get_universe("sp100")
|
| 108 |
+
assert len(syms) >= 90, f"Too few symbols: {len(syms)}"
|
| 109 |
+
assert all(isinstance(s, str) and len(s) <= 5 for s in syms), "Bad symbol format"
|
| 110 |
+
finally:
|
| 111 |
+
m._fetch_universe_from_wikipedia = orig
|
| 112 |
+
return f"fallback returns {len(syms)} symbols"
|
| 113 |
+
|
| 114 |
+
check("2.2 get_universe fallback path works", test_get_universe_returns_symbols)
|
| 115 |
+
|
| 116 |
+
def test_result_persistence():
|
| 117 |
+
from run_us_scan import FINAL_RESULTS_FILE, _load_results
|
| 118 |
+
data = _load_results()
|
| 119 |
+
assert isinstance(data, dict)
|
| 120 |
+
assert "stage1" in data
|
| 121 |
+
assert "stage2" in data
|
| 122 |
+
return f"file={'exists' if FINAL_RESULTS_FILE.exists() else 'missing'}, stage1={len(data['stage1'])}, stage2={len(data['stage2'])}"
|
| 123 |
+
|
| 124 |
+
check("2.3 _load_results works", test_result_persistence)
|
| 125 |
+
|
| 126 |
+
# ─────────────────────────────────────────────
|
| 127 |
+
# SECTION 3: Scan Results Quality
|
| 128 |
+
# ─────────────────────────────────────────────
|
| 129 |
+
print("\n" + "="*60)
|
| 130 |
+
print("SECTION 3: Scan Results Quality")
|
| 131 |
+
print("="*60)
|
| 132 |
+
|
| 133 |
+
def test_scan_results_format():
|
| 134 |
+
scan_file = Path("paper_trading/markets/us/scan_results.json")
|
| 135 |
+
if not scan_file.exists():
|
| 136 |
+
raise AssertionError("scan_results.json does not exist")
|
| 137 |
+
data = json.loads(scan_file.read_text())
|
| 138 |
+
assert data.get("market_id") == "us", f"market_id={data.get('market_id')}"
|
| 139 |
+
assert data.get("completed") is True, "scan not completed"
|
| 140 |
+
s1 = data.get("stage1", {})
|
| 141 |
+
s2 = data.get("stage2", {})
|
| 142 |
+
return f"stage1={len(s1)}, stage2={len(s2)}, universe={data.get('universe')}"
|
| 143 |
+
|
| 144 |
+
check("3.1 scan_results.json exists and is completed", test_scan_results_format)
|
| 145 |
+
|
| 146 |
+
def test_scan_has_enough_symbols():
|
| 147 |
+
scan_file = Path("paper_trading/markets/us/scan_results.json")
|
| 148 |
+
if not scan_file.exists():
|
| 149 |
+
raise AssertionError("scan_results.json missing")
|
| 150 |
+
data = json.loads(scan_file.read_text())
|
| 151 |
+
s1 = data.get("stage1", {})
|
| 152 |
+
total = len(s1)
|
| 153 |
+
# A real sp100 scan should have at least 50 symbols
|
| 154 |
+
if total < 10:
|
| 155 |
+
raise AssertionError(f"Only {total} symbols in stage1 — scan was a test run, NOT a full SP100 scan! universekey={data.get('universe_key')}")
|
| 156 |
+
if total < 80:
|
| 157 |
+
return f"WARN: only {total} stage1 symbols — partial scan?"
|
| 158 |
+
return f"{total} stage1 symbols scanned"
|
| 159 |
+
|
| 160 |
+
check("3.2 Scan covers enough SP100 symbols", test_scan_has_enough_symbols)
|
| 161 |
+
|
| 162 |
+
def test_scan_eligible_symbols_field():
|
| 163 |
+
scan_file = Path("paper_trading/markets/us/scan_results.json")
|
| 164 |
+
if not scan_file.exists():
|
| 165 |
+
raise AssertionError("scan_results.json missing")
|
| 166 |
+
data = json.loads(scan_file.read_text())
|
| 167 |
+
# New run_us_scan.py sets eligible_symbols
|
| 168 |
+
el = data.get("eligible_symbols")
|
| 169 |
+
ec = data.get("eligible_count")
|
| 170 |
+
# compute from stage2 as fallback
|
| 171 |
+
stage2 = data.get("stage2", {})
|
| 172 |
+
computed_eligible = [s for s, v in stage2.items() if v.get("eligible")]
|
| 173 |
+
if el is None:
|
| 174 |
+
return f"WARN: eligible_symbols field missing — old scan format (from scanner_engine.py); computed from stage2: {len(computed_eligible)}"
|
| 175 |
+
assert isinstance(el, list), f"eligible_symbols not a list: {type(el)}"
|
| 176 |
+
assert ec == len(el), f"eligible_count={ec} != len(eligible_symbols)={len(el)}"
|
| 177 |
+
return f"{len(el)} eligible symbols"
|
| 178 |
+
|
| 179 |
+
check("3.3 eligible_symbols field present and correct", test_scan_eligible_symbols_field)
|
| 180 |
+
|
| 181 |
+
def test_scan_eligible_via_app_logic():
|
| 182 |
+
"""Simulate how app.py get_eligible_stocks reads the file."""
|
| 183 |
+
scan_file = Path("paper_trading/markets/us/scan_results.json")
|
| 184 |
+
if not scan_file.exists():
|
| 185 |
+
raise AssertionError("scan_results.json missing")
|
| 186 |
+
data = json.loads(scan_file.read_text())
|
| 187 |
+
stage2 = data.get("stage2") or {}
|
| 188 |
+
eligible = [s for s, v in stage2.items() if v.get("eligible")]
|
| 189 |
+
excluded = [s for s, v in stage2.items() if not v.get("eligible")]
|
| 190 |
+
stage1 = data.get("stage1") or {}
|
| 191 |
+
s1_pass = [s for s, v in stage1.items() if v.get("passed")]
|
| 192 |
+
s1_fail = [s for s, v in stage1.items() if not v.get("passed")]
|
| 193 |
+
if len(eligible) == 0 and len(stage2) > 0:
|
| 194 |
+
raise AssertionError(f"0 eligible stocks from {len(stage2)} stage2 — suspicious!")
|
| 195 |
+
return f"stage1: {len(s1_pass)} pass / {len(s1_fail)} fail | stage2: {len(eligible)} eligible / {len(excluded)} excluded | eligible: {eligible[:5]}"
|
| 196 |
+
|
| 197 |
+
check("3.4 Eligible stocks readable via app logic", test_scan_eligible_via_app_logic)
|
| 198 |
+
|
| 199 |
+
# ─────────────────────────────────────────────
|
| 200 |
+
# SECTION 4: Auto-Trader State
|
| 201 |
+
# ─────────────────────────────────────────────
|
| 202 |
+
print("\n" + "="*60)
|
| 203 |
+
print("SECTION 4: Auto-Trader State")
|
| 204 |
+
print("="*60)
|
| 205 |
+
|
| 206 |
+
def test_state_file():
|
| 207 |
+
state_file = Path("paper_trading/markets/us/auto_trader/state.json")
|
| 208 |
+
if not state_file.exists():
|
| 209 |
+
raise AssertionError("state.json missing")
|
| 210 |
+
state = json.loads(state_file.read_text())
|
| 211 |
+
assert state.get("market_id") == "us", f"market_id={state.get('market_id')}"
|
| 212 |
+
cash = float(state.get("broker_cash", 0))
|
| 213 |
+
assert cash > 0, f"cash={cash} (zero?)"
|
| 214 |
+
return f"cash=${cash:,.2f}, total_trades={state.get('total_trades')}, days_run={state.get('total_days_run')}"
|
| 215 |
+
|
| 216 |
+
check("4.1 state.json exists with correct market_id", test_state_file)
|
| 217 |
+
|
| 218 |
+
def test_status_file():
|
| 219 |
+
status_file = Path("paper_trading/markets/us/auto_trader/status.json")
|
| 220 |
+
if not status_file.exists():
|
| 221 |
+
raise AssertionError("status.json missing")
|
| 222 |
+
s = json.loads(status_file.read_text())
|
| 223 |
+
mkt = s.get("market_id")
|
| 224 |
+
if mkt and mkt != "us":
|
| 225 |
+
raise AssertionError(f"status.json market_id={mkt} (should be 'us')")
|
| 226 |
+
return f"status={s.get('status')}, is_running={s.get('is_running')}"
|
| 227 |
+
|
| 228 |
+
check("4.2 status.json exists", test_status_file)
|
| 229 |
+
|
| 230 |
+
def test_no_bist_state_confusion():
|
| 231 |
+
"""State should not have BIST-specific fields crossed in."""
|
| 232 |
+
state_file = Path("paper_trading/markets/us/auto_trader/state.json")
|
| 233 |
+
if not state_file.exists():
|
| 234 |
+
return "WARN: state.json missing — skip"
|
| 235 |
+
state = json.loads(state_file.read_text())
|
| 236 |
+
# positions should not contain .IS symbol suffixes
|
| 237 |
+
positions = state.get("broker_positions", {})
|
| 238 |
+
bist_positions = [s for s in positions if s.endswith(".IS")]
|
| 239 |
+
if bist_positions:
|
| 240 |
+
raise AssertionError(f"US portfolio has BIST symbols: {bist_positions}")
|
| 241 |
+
return f"{len(positions)} open positions, no BIST contamination"
|
| 242 |
+
|
| 243 |
+
check("4.3 No BIST symbols in US portfolio", test_no_bist_state_confusion)
|
| 244 |
+
|
| 245 |
+
def test_initial_cash():
|
| 246 |
+
"""initial_cash should be set (needed for pnlPct calc)."""
|
| 247 |
+
state_file = Path("paper_trading/markets/us/auto_trader/state.json")
|
| 248 |
+
if not state_file.exists():
|
| 249 |
+
raise AssertionError("state.json missing")
|
| 250 |
+
state = json.loads(state_file.read_text())
|
| 251 |
+
ic = state.get("initial_cash")
|
| 252 |
+
if ic is None:
|
| 253 |
+
return "WARN: initial_cash not in state.json — pnlPct will use hardcoded fallback 100000"
|
| 254 |
+
assert float(ic) > 0
|
| 255 |
+
return f"initial_cash=${float(ic):,.0f}"
|
| 256 |
+
|
| 257 |
+
check("4.4 initial_cash field in state.json", test_initial_cash)
|
| 258 |
+
|
| 259 |
+
# ─────────────────────────────────────────────
|
| 260 |
+
# SECTION 5: Trading DB
|
| 261 |
+
# ─────────────────────────────────────────────
|
| 262 |
+
print("\n" + "="*60)
|
| 263 |
+
print("SECTION 5: US Trading Database")
|
| 264 |
+
print("="*60)
|
| 265 |
+
|
| 266 |
+
def test_db_exists():
|
| 267 |
+
db = Path("paper_trading/markets/us/trading.db")
|
| 268 |
+
if not db.exists():
|
| 269 |
+
raise AssertionError("trading.db missing")
|
| 270 |
+
con = sqlite3.connect(db)
|
| 271 |
+
tables = {t[0] for t in con.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()}
|
| 272 |
+
required = {"trades", "daily_snapshots", "signals_log"}
|
| 273 |
+
missing = required - tables
|
| 274 |
+
if missing:
|
| 275 |
+
raise AssertionError(f"Missing tables: {missing}")
|
| 276 |
+
con.close()
|
| 277 |
+
return f"tables: {sorted(tables)}"
|
| 278 |
+
|
| 279 |
+
check("5.1 trading.db exists with required tables", test_db_exists)
|
| 280 |
+
|
| 281 |
+
def test_db_trades():
|
| 282 |
+
db = Path("paper_trading/markets/us/trading.db")
|
| 283 |
+
if not db.exists():
|
| 284 |
+
raise AssertionError("trading.db missing")
|
| 285 |
+
con = sqlite3.connect(db)
|
| 286 |
+
trades = con.execute("SELECT * FROM trades LIMIT 20").fetchall()
|
| 287 |
+
cols = [d[0] for d in con.execute("PRAGMA table_info(trades)").fetchall()]
|
| 288 |
+
con.close()
|
| 289 |
+
if len(trades) == 0:
|
| 290 |
+
return "WARN: 0 trades in DB (no trades yet)"
|
| 291 |
+
# Check the trades don't have BIST symbols
|
| 292 |
+
if "symbol" in cols:
|
| 293 |
+
si = cols.index("symbol")
|
| 294 |
+
else:
|
| 295 |
+
si = 1 # fallback
|
| 296 |
+
symbols = [t[si] for t in trades]
|
| 297 |
+
bist_trades = [s for s in symbols if str(s).endswith(".IS")]
|
| 298 |
+
if bist_trades:
|
| 299 |
+
raise AssertionError(f"US DB has BIST symbol trades: {bist_trades}")
|
| 300 |
+
return f"{len(trades)} trades: {list(set(symbols))}"
|
| 301 |
+
|
| 302 |
+
check("5.2 No BIST symbols in US trades", test_db_trades)
|
| 303 |
+
|
| 304 |
+
def test_db_snapshots():
|
| 305 |
+
db = Path("paper_trading/markets/us/trading.db")
|
| 306 |
+
if not db.exists():
|
| 307 |
+
raise AssertionError("trading.db missing")
|
| 308 |
+
con = sqlite3.connect(db)
|
| 309 |
+
snaps = con.execute("SELECT * FROM daily_snapshots ORDER BY id DESC LIMIT 5").fetchall()
|
| 310 |
+
cols = [d[0] for d in con.execute("PRAGMA table_info(daily_snapshots)").fetchall()]
|
| 311 |
+
con.close()
|
| 312 |
+
if len(snaps) == 0:
|
| 313 |
+
return "WARN: 0 snapshots in DB"
|
| 314 |
+
latest = dict(zip(cols, snaps[0]))
|
| 315 |
+
eq = latest.get("equity", latest.get("total_equity", 0))
|
| 316 |
+
return f"{len(snaps)} snapshots, latest equity=${float(eq or 0):,.2f}"
|
| 317 |
+
|
| 318 |
+
check("5.3 Daily snapshots in US DB", test_db_snapshots)
|
| 319 |
+
|
| 320 |
+
# ─────────────────────────────────────────────
|
| 321 |
+
# SECTION 6: Kill Switch
|
| 322 |
+
# ─────────────────────────────────────────────
|
| 323 |
+
print("\n" + "="*60)
|
| 324 |
+
print("SECTION 6: Kill Switch Logic")
|
| 325 |
+
print("="*60)
|
| 326 |
+
|
| 327 |
+
def test_kill_switch_path():
|
| 328 |
+
# Path from app.py: market_dir / ".kill_switch"
|
| 329 |
+
kill_path = Path("paper_trading/markets/us/.kill_switch")
|
| 330 |
+
# It should NOT exist (kill switch inactive)
|
| 331 |
+
if kill_path.exists():
|
| 332 |
+
content = json.loads(kill_path.read_text())
|
| 333 |
+
if content.get("active"):
|
| 334 |
+
raise AssertionError(f"US kill switch is ACTIVE: {content}")
|
| 335 |
+
return f"WARN: kill switch file exists but active=False: {content}"
|
| 336 |
+
return "kill switch not active (file absent) — OK"
|
| 337 |
+
|
| 338 |
+
check("6.1 US kill switch inactive", test_kill_switch_path)
|
| 339 |
+
|
| 340 |
+
def test_kill_switch_read():
    """Simulate build_market_snapshot reading kill switch for US."""
    switch_file = Path("paper_trading/markets/us/.kill_switch")
    payload = None
    if switch_file.exists():
        try:
            payload = json.loads(switch_file.read_text())
        except Exception:
            # Unreadable/corrupt file is treated the same as "absent".
            pass
    data = payload or {}
    active = data.get("active", False)
    reason = data.get("reason")
    assert isinstance(active, bool)
    return f"killSwitchActive={active}, reason={reason}"

check("6.2 Kill switch read logic works", test_kill_switch_read)
|
| 356 |
+
# ─────────────────────────────────────────────
# SECTION 7: app.py build_market_snapshot for US
# ─────────────────────────────────────────────
print("\n" + "=" * 60)
print("SECTION 7: app.py build_market_snapshot for US")
print("=" * 60)

def test_snapshot_import():
    """Import-only smoke test of the modules app.py relies on (FastAPI never starts)."""
    from trading.db_store import TradingStore  # noqa: F401 — import check only
    from trading.market_registry import get_market_config, get_scan_results_path  # noqa: F401
    us_config = get_market_config("us")
    assert us_config.currency == "USD"
    return "market_registry and TradingStore importable"

check("7.1 Core modules importable", test_snapshot_import)
+
|
| 374 |
+
def test_snapshot_pnl_calc():
    """Test the pnlPct formula with initial_cash from state."""
    state_path = Path("paper_trading/markets/us/auto_trader/state.json")
    if not state_path.exists():
        raise AssertionError("state.json missing")
    snapshot = json.loads(state_path.read_text())
    # Mirror the computation app.py performs when building the market snapshot.
    equity_value = float(snapshot.get("broker_cash", 100000))
    initial_cash_base = float(snapshot.get("initial_cash", 100000) or 100000)
    if equity_value and initial_cash_base > 0:
        pnl_pct = round((equity_value / initial_cash_base - 1) * 100, 2)
    else:
        pnl_pct = 0
    return f"equity=${equity_value:,.2f}, initial_cash=${initial_cash_base:,.0f}, pnlPct={pnl_pct}%"

check("7.2 pnlPct calc uses initial_cash correctly", test_snapshot_pnl_calc)
| 387 |
+
|
| 388 |
+
def test_currency_label():
    """Simulate currency and label selection."""
    from trading.market_registry import DEFAULT_MARKET_ID
    market_id = "us"
    # Same branch app.py takes: default market → BIST branding, otherwise US.
    if market_id == DEFAULT_MARKET_ID:
        currency, label = "TRY", "Borsa Istanbul"
    else:
        currency, label = "USD", "US Equities"
    assert currency == "USD"
    assert label == "US Equities"
    return f"currency={currency}, label={label}"

check("7.3 Currency and label for US", test_currency_label)
+
|
| 400 |
+
def test_db_store_reads():
    """Test TradingStore reads for US DB."""
    from trading.db_store import TradingStore
    db_file = Path("paper_trading/markets/us/trading.db")
    if not db_file.exists():
        raise AssertionError("trading.db missing")
    us_store = TradingStore(db_path=str(db_file))
    all_trades = us_store.get_all_trades()
    curve = us_store.get_equity_curve()
    recent_signals = us_store.get_signals(limit=100)
    return f"trades={len(all_trades)}, equity_points={len(curve)}, signals={len(recent_signals)}"

check("7.4 TradingStore reads from US DB", test_db_store_reads)
+
|
| 414 |
+
# ─────────────────────────────────────────────
# SECTION 8: Scheduler Wiring
# ─────────────────────────────────────────────
print("\n" + "=" * 60)
print("SECTION 8: Scheduler Wiring Check")
print("=" * 60)

def test_scheduler_function_exists():
    """Verify _daily_us_scan_scheduler exists in app module without starting it."""
    import ast
    source = Path("app.py").read_text()
    module = ast.parse(source)
    # Collect every (sync) function name defined anywhere in app.py.
    defined = [node.name for node in ast.walk(module) if isinstance(node, ast.FunctionDef)]
    assert "_daily_us_scan_scheduler" in defined, f"not found; available: {[f for f in defined if 'sched' in f.lower() or 'us' in f.lower()]}"
    assert "_daily_scan_scheduler" in defined, "BIST scheduler missing"
    assert "_ensure_background_services_started" in defined
    return f"schedulers: {[f for f in defined if 'sched' in f.lower()]}"

check("8.1 _daily_us_scan_scheduler defined in app.py", test_scheduler_function_exists)
+
|
| 434 |
+
def test_us_scheduler_thread_global():
    """Verify _us_scheduler_thread global exists in app.py."""
    source = Path("app.py").read_text()
    assert "_us_scheduler_thread" in source, "_us_scheduler_thread not found in app.py"
    assert "_us_scheduler_thread = None" in source, "thread not initialized to None"
    return "global defined and initialized"

check("8.2 _us_scheduler_thread global in app.py", test_us_scheduler_thread_global)
| 442 |
+
|
| 443 |
+
def test_ensure_starts_us_scheduler():
    """Verify _ensure_background_services_started starts US thread."""
    source = Path("app.py").read_text()
    assert "target=_daily_us_scan_scheduler" in source, "US scheduler not started in _ensure"
    # The starter must also guard against double-starting a live thread.
    assert "_us_scheduler_thread.is_alive()" in source
    return "US scheduler started in _ensure_background_services_started"

check("8.3 _ensure_background_services_started starts US scheduler", test_ensure_starts_us_scheduler)
| 452 |
+
|
| 453 |
+
def test_us_scan_hour():
    """Verify US scan hour is 17:00 ET."""
    source = Path("app.py").read_text()
    assert "US_SCAN_HOUR_ET = 17" in source, "US_SCAN_HOUR_ET not 17"
    assert "America/New_York" in source
    return "US_SCAN_HOUR_ET=17, America/New_York timezone"

check("8.4 US scan scheduled at 17:00 ET", test_us_scan_hour)
| 461 |
+
|
| 462 |
+
# ─────────────────────────────────────────────
# SECTION 9: run_us_scan module
# ─────────────────────────────────────────────
print("\n" + "=" * 60)
print("SECTION 9: run_us_scan.py module integrity")
print("=" * 60)

def test_run_us_scan_importable():
    """run_us_scan must expose the full scan API and point at the US results file."""
    import run_us_scan
    for required in ("run_scan", "get_universe", "stage1_prefilter", "stage2_backtest", "FINAL_RESULTS_FILE"):
        assert hasattr(run_us_scan, required)
    assert "markets/us/scan_results.json" in str(run_us_scan.FINAL_RESULTS_FILE)
    return "all required functions present"

check("9.1 run_us_scan.py importable with correct API", test_run_us_scan_importable)
+
|
| 481 |
+
def test_run_us_scan_stage1_single():
    """Run stage1_prefilter on AAPL (fast, just yfinance download)."""
    from run_us_scan import stage1_prefilter
    info = stage1_prefilter("AAPL")
    for key in ("passed", "avg_volume", "last_price"):
        assert key in info
    assert info.get("symbol") == "AAPL"
    price = info.get("last_price", 0)
    vol = info.get("avg_volume", 0)
    # A mega-cap like AAPL should always clear the liquidity/price prefilter.
    if not info.get("passed"):
        raise AssertionError(f"AAPL failed stage1: {info.get('reason')} (price={price}, vol={vol})")
    return f"AAPL passed: price=${price:.0f}, avg_vol={vol:,.0f}"

check("9.2 stage1_prefilter(AAPL) passes", test_run_us_scan_stage1_single)
| 497 |
+
|
| 498 |
+
def test_run_us_scan_run_fn_signature():
    """run_scan signature must match what app.py calls.

    Checks that the required keyword parameters exist and that the
    default universe is 'sp100'.
    """
    import inspect
    import run_us_scan
    sig = inspect.signature(run_us_scan.run_scan)
    params = list(sig.parameters.keys())
    assert "universe" in params, f"missing 'universe' param: {params}"
    assert "force" in params, f"missing 'force' param: {params}"
    assert "stage1_only" in params, f"missing 'stage1_only' param: {params}"
    defaults = {k: v.default for k, v in sig.parameters.items() if v.default is not inspect.Parameter.empty}
    assert defaults.get("universe") == "sp100", f"default universe={defaults.get('universe')}"
    return f"signature OK: {params}"

# FIX: the previous call bound a pointless, misleadingly named walrus alias
# (test_run_us_scan_stage1_single_sig := test_run_us_scan_run_fn_signature) —
# a dead module-level name suggesting the stage1 test; pass the function directly.
check("9.3 run_scan signature matches app.py call", test_run_us_scan_run_fn_signature)
| 511 |
+
|
| 512 |
+
# ─────────────────────────────────────────────
# SECTION 10: Portfolio Isolation
# ─────────────────────────────────────────────
print("\n" + "=" * 60)
print("SECTION 10: BIST vs US Portfolio Isolation")
print("=" * 60)

def test_separate_db_files():
    """Each market must persist its trades in its own SQLite file."""
    bist_path = Path("paper_trading/trading.db")
    us_path = Path("paper_trading/markets/us/trading.db")
    assert bist_path.exists(), "BIST trading.db missing"
    assert us_path.exists(), "US trading.db missing"
    assert bist_path != us_path, "Same DB file for both markets!"
    return f"BIST: {bist_path}, US: {us_path}"

check("10.1 Separate trading.db files for each market", test_separate_db_files)
| 528 |
+
|
| 529 |
+
def test_separate_state_files():
    """State files are per-market and the US one carries the right market_id tag."""
    bist_state_path = Path("paper_trading/auto_trader/state.json")
    us_state_path = Path("paper_trading/markets/us/auto_trader/state.json")
    if not bist_state_path.exists():
        return "WARN: BIST state.json missing"
    if not us_state_path.exists():
        raise AssertionError("US state.json missing")
    bist_state = json.loads(bist_state_path.read_text())
    us_state = json.loads(us_state_path.read_text())
    b_cash = bist_state.get("broker_cash", 0)
    u_cash = us_state.get("broker_cash", 0)
    # market_id tags prove the files belong to the right market.
    b_mkt = bist_state.get("market_id", "bist")
    u_mkt = us_state.get("market_id", "?")
    assert u_mkt == "us", f"US state market_id={u_mkt}"
    return f"BIST cash=₺{b_cash:,.0f} ({b_mkt}), US cash=${u_cash:,.0f} ({u_mkt})"

check("10.2 Separate state.json files with correct market_id", test_separate_state_files)
| 547 |
+
|
| 548 |
+
def test_no_state_leakage():
    """BIST state should not exist inside US directory and vice versa."""
    us_dir = Path("paper_trading/markets/us")
    # BIST scan artifacts must never appear inside the US market directory.
    stray = [
        name
        for name in ("bist100_scan_results.json", "bist100_scan_results_work.json")
        if (us_dir / name).exists()
    ]
    if stray:
        raise AssertionError(f"BIST scan files in US dir: {stray}")
    # Conversely, the US scan file must not land in the BIST root.
    if (Path("paper_trading") / "scan_results.json").exists():
        return "WARN: scan_results.json exists in BIST root (could be US file leakage)"
    return "No cross-market file leakage"

check("10.3 No cross-market file contamination", test_no_state_leakage)
| 564 |
+
|
| 565 |
+
# ─────────────────────────────────────────────
# SUMMARY
# ─────────────────────────────────────────────
print("\n" + "="*60)
print("AUDIT SUMMARY")
print("="*60)

# Bucket results by status for the counts and the detail sections below.
passed = [r for r in results if r[0] == PASS]
warned = [r for r in results if r[0] == WARN]
failed = [r for r in results if r[0] == FAIL]

print(f"\n{PASS} {len(passed)} passed")
print(f"{WARN} {len(warned)} warnings")
print(f"{FAIL} {len(failed)} failed")
print()

if warned:
    print("WARNINGS:")
    for _, name, msg in warned:
        print(f"  {WARN} {name}: {msg}")
    print()

if failed:
    print("FAILURES:")
    for _, name, msg in failed:
        print(f"  {FAIL} {name}: {msg}")
    print()

# Full run log in original execution order.
# FIX: iterate `results` directly — the previous
# `all_r = [(s, n, m) for s, n, m in results]` was a pointless
# element-by-element copy of the list.
for s, n, m in results:
    print(f"  {s} {n}: {m}")
huggingface-space/ai/predictions_api.py
CHANGED
|
@@ -79,35 +79,31 @@ def _compute_confidence(r2: float, direction_correct: float) -> float:
|
|
| 79 |
r2_c = float(r2) if np.isfinite(r2) else 0.0
|
| 80 |
dir_c = float(direction_correct) if np.isfinite(direction_correct) else 0.5
|
| 81 |
|
| 82 |
-
#
|
| 83 |
-
|
| 84 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 85 |
r2_norm = min(1.0, r2_c / 0.25)
|
| 86 |
else:
|
| 87 |
-
|
| 88 |
-
# R²=-0.5→0.10, R²=-2.0→0.0
|
| 89 |
-
r2_norm = max(0.0, 0.20 + r2_c * 0.10)
|
| 90 |
|
| 91 |
-
# ── Direction component (
|
| 92 |
-
#
|
| 93 |
-
if dir_c
|
|
|
|
|
|
|
| 94 |
dir_norm = 1.0
|
| 95 |
-
elif dir_c >= 0.50:
|
| 96 |
-
dir_norm = 0.2 + (dir_c - 0.50) / 0.15 * 0.8
|
| 97 |
else:
|
| 98 |
-
|
| 99 |
-
dir_norm = max(0.0, dir_c / 0.50 * 0.2)
|
| 100 |
-
|
| 101 |
-
# ── Base component (30% weight) ──
|
| 102 |
-
# Having a trained multi-model ensemble is itself valuable.
|
| 103 |
-
# This ensures minimum ~25-30 confidence for any working model.
|
| 104 |
-
base = 0.85
|
| 105 |
|
| 106 |
-
raw = r2_norm * 0.
|
| 107 |
|
| 108 |
-
return max(
|
| 109 |
except Exception:
|
| 110 |
-
return
|
| 111 |
|
| 112 |
|
| 113 |
def _apply_shrinkage(predicted_change_pct: float, confidence_pct: float, days_ahead: int) -> float:
|
|
@@ -330,6 +326,10 @@ def predict_stock_for_api(symbol, days_ahead=5, model_type='ensemble', market_id
|
|
| 330 |
# Current price is the most recent close; prediction is for +days_ahead
|
| 331 |
current_price = float(df['Close'].iloc[-1])
|
| 332 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 333 |
# Final safety: ensure finite values for sklearn
|
| 334 |
X = np.nan_to_num(X, nan=0.0, posinf=0.0, neginf=0.0)
|
| 335 |
y = np.nan_to_num(y, nan=0.0, posinf=0.0, neginf=0.0)
|
|
@@ -345,66 +345,108 @@ def predict_stock_for_api(symbol, days_ahead=5, model_type='ensemble', market_id
|
|
| 345 |
val_start = split_idx # fallback if not enough data for purge
|
| 346 |
X_train, X_test = X_scaled[:split_idx], X_scaled[val_start:]
|
| 347 |
y_train, y_test = y[:split_idx], y[val_start:]
|
| 348 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 349 |
# Model seç ve eğit
|
| 350 |
if model_type == 'xgboost' and XGBOOST_AVAILABLE:
|
| 351 |
model = xgb.XGBRegressor(
|
| 352 |
-
n_estimators=
|
| 353 |
-
max_depth=
|
| 354 |
-
learning_rate=0.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 355 |
random_state=42,
|
| 356 |
-
n_jobs=-1
|
| 357 |
)
|
| 358 |
elif model_type == 'lightgbm' and LIGHTGBM_AVAILABLE:
|
| 359 |
model = lgb.LGBMRegressor(
|
| 360 |
-
n_estimators=
|
| 361 |
-
max_depth=
|
| 362 |
-
learning_rate=0.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 363 |
random_state=42,
|
| 364 |
n_jobs=-1,
|
| 365 |
-
verbose=-1
|
| 366 |
)
|
| 367 |
elif model_type == 'rf':
|
| 368 |
model = RandomForestRegressor(
|
| 369 |
-
n_estimators=
|
| 370 |
-
max_depth=
|
|
|
|
|
|
|
|
|
|
| 371 |
random_state=42,
|
| 372 |
-
n_jobs=-1
|
| 373 |
)
|
| 374 |
elif model_type == 'ensemble':
|
| 375 |
-
# Ensemble: XGBoost + LightGBM + RandomForest
|
| 376 |
models = []
|
| 377 |
|
| 378 |
if XGBOOST_AVAILABLE:
|
| 379 |
xgb_model = xgb.XGBRegressor(
|
| 380 |
-
n_estimators=
|
| 381 |
-
max_depth=
|
| 382 |
-
learning_rate=0.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 383 |
random_state=42,
|
| 384 |
-
n_jobs=-1
|
| 385 |
)
|
| 386 |
-
xgb_model.fit(X_train, y_train)
|
| 387 |
models.append(xgb_model)
|
| 388 |
|
| 389 |
if LIGHTGBM_AVAILABLE:
|
| 390 |
lgb_model = lgb.LGBMRegressor(
|
| 391 |
-
n_estimators=
|
| 392 |
-
max_depth=
|
| 393 |
-
learning_rate=0.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 394 |
random_state=42,
|
| 395 |
n_jobs=-1,
|
| 396 |
-
verbose=-1
|
| 397 |
)
|
| 398 |
-
lgb_model.fit(X_train, y_train)
|
| 399 |
models.append(lgb_model)
|
| 400 |
|
| 401 |
rf_model = RandomForestRegressor(
|
| 402 |
-
n_estimators=
|
| 403 |
-
max_depth=
|
|
|
|
|
|
|
|
|
|
| 404 |
random_state=42,
|
| 405 |
-
n_jobs=-1
|
| 406 |
)
|
| 407 |
-
rf_model.fit(X_train, y_train)
|
| 408 |
models.append(rf_model)
|
| 409 |
|
| 410 |
# Ensemble tahmin: ortalama
|
|
@@ -433,15 +475,18 @@ def predict_stock_for_api(symbol, days_ahead=5, model_type='ensemble', market_id
|
|
| 433 |
|
| 434 |
confidence_pct = _compute_confidence(r2, direction_correct)
|
| 435 |
|
| 436 |
-
# Son tahmin (latest bar)
|
| 437 |
last_features_raw = np.nan_to_num(X_all[-1].reshape(1, -1), nan=0.0, posinf=0.0, neginf=0.0)
|
| 438 |
-
last_features = scaler_X.transform(last_features_raw)
|
| 439 |
predictions_final = np.array([
|
| 440 |
float(np.asarray(m.predict(last_features), dtype=float).ravel()[0])
|
| 441 |
for m in models
|
| 442 |
], dtype=float)
|
| 443 |
predicted_change = float(predictions_final.mean()) # already % return
|
| 444 |
|
|
|
|
|
|
|
|
|
|
| 445 |
# Bayesian shrinkage: dampen extreme predictions when confidence is low
|
| 446 |
predicted_change = _apply_shrinkage(predicted_change, confidence_pct, days_ahead)
|
| 447 |
predicted_price = current_price * (1 + predicted_change / 100)
|
|
@@ -469,18 +514,23 @@ def predict_stock_for_api(symbol, days_ahead=5, model_type='ensemble', market_id
|
|
| 469 |
return res
|
| 470 |
else:
|
| 471 |
# Fallback to GradientBoosting if no advanced models
|
| 472 |
-
model = GradientBoostingRegressor(
|
|
|
|
|
|
|
|
|
|
| 473 |
else:
|
| 474 |
# Default: GradientBoosting
|
| 475 |
model = GradientBoostingRegressor(
|
| 476 |
-
n_estimators=
|
| 477 |
-
max_depth=
|
| 478 |
-
learning_rate=0.
|
| 479 |
-
|
|
|
|
|
|
|
| 480 |
)
|
| 481 |
|
| 482 |
# Tek model durumunda eğitim
|
| 483 |
-
model.fit(X_train, y_train)
|
| 484 |
|
| 485 |
# Test seti üzerinde performans (target is % return, no inverse transform)
|
| 486 |
y_pred_test = np.asarray(model.predict(X_test), dtype=float)
|
|
@@ -505,11 +555,14 @@ def predict_stock_for_api(symbol, days_ahead=5, model_type='ensemble', market_id
|
|
| 505 |
# Convert to a UI-friendly 0-100 confidence score
|
| 506 |
confidence_pct = _compute_confidence(r2, direction_correct)
|
| 507 |
|
| 508 |
-
# Tahmin yap (latest bar)
|
| 509 |
last_features_raw = np.nan_to_num(X_all[-1].reshape(1, -1), nan=0.0, posinf=0.0, neginf=0.0)
|
| 510 |
-
last_features = scaler_X.transform(last_features_raw)
|
| 511 |
predicted_change = float(np.asarray(model.predict(last_features), dtype=float).ravel()[0]) # already % return
|
| 512 |
|
|
|
|
|
|
|
|
|
|
| 513 |
# Bayesian shrinkage: dampen extreme predictions when confidence is low
|
| 514 |
predicted_change = _apply_shrinkage(predicted_change, confidence_pct, days_ahead)
|
| 515 |
predicted_price = current_price * (1 + predicted_change / 100)
|
|
|
|
| 79 |
r2_c = float(r2) if np.isfinite(r2) else 0.0
|
| 80 |
dir_c = float(direction_correct) if np.isfinite(direction_correct) else 0.5
|
| 81 |
|
| 82 |
+
# Hard floor: if model is no better than random, confidence = 0
|
| 83 |
+
if r2_c <= 0 and dir_c <= 0.52:
|
| 84 |
+
return 0.0
|
| 85 |
+
|
| 86 |
+
# ── R² component (40% weight) ──
|
| 87 |
+
# Financial R²: 0.0→0, 0.05→0.40, 0.15→0.80, 0.25→1.0
|
| 88 |
+
if r2_c > 0:
|
| 89 |
r2_norm = min(1.0, r2_c / 0.25)
|
| 90 |
else:
|
| 91 |
+
r2_norm = 0.0
|
|
|
|
|
|
|
| 92 |
|
| 93 |
+
# ── Direction component (60% weight) ──
|
| 94 |
+
# ≤52%→0, 55%→0.30, 60%→0.67, 65%→1.0
|
| 95 |
+
if dir_c <= 0.52:
|
| 96 |
+
dir_norm = 0.0
|
| 97 |
+
elif dir_c >= 0.65:
|
| 98 |
dir_norm = 1.0
|
|
|
|
|
|
|
| 99 |
else:
|
| 100 |
+
dir_norm = (dir_c - 0.52) / (0.65 - 0.52)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 101 |
|
| 102 |
+
raw = r2_norm * 0.40 + dir_norm * 0.60
|
| 103 |
|
| 104 |
+
return max(0.0, min(95.0, raw * 100.0))
|
| 105 |
except Exception:
|
| 106 |
+
return 0.0
|
| 107 |
|
| 108 |
|
| 109 |
def _apply_shrinkage(predicted_change_pct: float, confidence_pct: float, days_ahead: int) -> float:
|
|
|
|
| 326 |
# Current price is the most recent close; prediction is for +days_ahead
|
| 327 |
current_price = float(df['Close'].iloc[-1])
|
| 328 |
|
| 329 |
+
# ── Target clipping: cap extreme returns to prevent outlier-driven training ──
|
| 330 |
+
_clip_limit = 3.5 * float(np.sqrt(max(1, days_ahead))) # ~9% for 7 days
|
| 331 |
+
y = np.clip(y, -_clip_limit, _clip_limit)
|
| 332 |
+
|
| 333 |
# Final safety: ensure finite values for sklearn
|
| 334 |
X = np.nan_to_num(X, nan=0.0, posinf=0.0, neginf=0.0)
|
| 335 |
y = np.nan_to_num(y, nan=0.0, posinf=0.0, neginf=0.0)
|
|
|
|
| 345 |
val_start = split_idx # fallback if not enough data for purge
|
| 346 |
X_train, X_test = X_scaled[:split_idx], X_scaled[val_start:]
|
| 347 |
y_train, y_test = y[:split_idx], y[val_start:]
|
| 348 |
+
|
| 349 |
+
# ── Sample weighting: exponential recency (recent data 3x more important) ──
|
| 350 |
+
_n_train = len(X_train)
|
| 351 |
+
_sample_weights = np.exp(np.linspace(-1.0, 0.0, _n_train))
|
| 352 |
+
|
| 353 |
+
# ── Feature importance selection: reduce overfitting from high dimensionality ──
|
| 354 |
+
_n_keep = min(10, X_train.shape[1])
|
| 355 |
+
_selector_rf = RandomForestRegressor(
|
| 356 |
+
n_estimators=50, max_depth=4, min_samples_leaf=5,
|
| 357 |
+
max_features='sqrt', random_state=42, n_jobs=-1,
|
| 358 |
+
)
|
| 359 |
+
_selector_rf.fit(X_train, y_train, sample_weight=_sample_weights)
|
| 360 |
+
_importances = _selector_rf.feature_importances_
|
| 361 |
+
_top_feat_idx = np.argsort(_importances)[-_n_keep:]
|
| 362 |
+
X_train = X_train[:, _top_feat_idx]
|
| 363 |
+
X_test = X_test[:, _top_feat_idx]
|
| 364 |
+
|
| 365 |
# Model seç ve eğit
|
| 366 |
if model_type == 'xgboost' and XGBOOST_AVAILABLE:
|
| 367 |
model = xgb.XGBRegressor(
|
| 368 |
+
n_estimators=200,
|
| 369 |
+
max_depth=3,
|
| 370 |
+
learning_rate=0.03,
|
| 371 |
+
subsample=0.8,
|
| 372 |
+
colsample_bytree=0.7,
|
| 373 |
+
reg_alpha=0.1,
|
| 374 |
+
reg_lambda=1.0,
|
| 375 |
+
min_child_weight=5,
|
| 376 |
random_state=42,
|
| 377 |
+
n_jobs=-1,
|
| 378 |
)
|
| 379 |
elif model_type == 'lightgbm' and LIGHTGBM_AVAILABLE:
|
| 380 |
model = lgb.LGBMRegressor(
|
| 381 |
+
n_estimators=200,
|
| 382 |
+
max_depth=3,
|
| 383 |
+
learning_rate=0.03,
|
| 384 |
+
subsample=0.8,
|
| 385 |
+
colsample_bytree=0.7,
|
| 386 |
+
reg_alpha=0.1,
|
| 387 |
+
reg_lambda=1.0,
|
| 388 |
+
min_child_samples=10,
|
| 389 |
random_state=42,
|
| 390 |
n_jobs=-1,
|
| 391 |
+
verbose=-1,
|
| 392 |
)
|
| 393 |
elif model_type == 'rf':
|
| 394 |
model = RandomForestRegressor(
|
| 395 |
+
n_estimators=200,
|
| 396 |
+
max_depth=3,
|
| 397 |
+
min_samples_split=10,
|
| 398 |
+
min_samples_leaf=5,
|
| 399 |
+
max_features='sqrt',
|
| 400 |
random_state=42,
|
| 401 |
+
n_jobs=-1,
|
| 402 |
)
|
| 403 |
elif model_type == 'ensemble':
|
| 404 |
+
# Ensemble: XGBoost + LightGBM + RandomForest (regularized)
|
| 405 |
models = []
|
| 406 |
|
| 407 |
if XGBOOST_AVAILABLE:
|
| 408 |
xgb_model = xgb.XGBRegressor(
|
| 409 |
+
n_estimators=200,
|
| 410 |
+
max_depth=3,
|
| 411 |
+
learning_rate=0.03,
|
| 412 |
+
subsample=0.8,
|
| 413 |
+
colsample_bytree=0.7,
|
| 414 |
+
reg_alpha=0.1,
|
| 415 |
+
reg_lambda=1.0,
|
| 416 |
+
min_child_weight=5,
|
| 417 |
random_state=42,
|
| 418 |
+
n_jobs=-1,
|
| 419 |
)
|
| 420 |
+
xgb_model.fit(X_train, y_train, sample_weight=_sample_weights)
|
| 421 |
models.append(xgb_model)
|
| 422 |
|
| 423 |
if LIGHTGBM_AVAILABLE:
|
| 424 |
lgb_model = lgb.LGBMRegressor(
|
| 425 |
+
n_estimators=200,
|
| 426 |
+
max_depth=3,
|
| 427 |
+
learning_rate=0.03,
|
| 428 |
+
subsample=0.8,
|
| 429 |
+
colsample_bytree=0.7,
|
| 430 |
+
reg_alpha=0.1,
|
| 431 |
+
reg_lambda=1.0,
|
| 432 |
+
min_child_samples=10,
|
| 433 |
random_state=42,
|
| 434 |
n_jobs=-1,
|
| 435 |
+
verbose=-1,
|
| 436 |
)
|
| 437 |
+
lgb_model.fit(X_train, y_train, sample_weight=_sample_weights)
|
| 438 |
models.append(lgb_model)
|
| 439 |
|
| 440 |
rf_model = RandomForestRegressor(
|
| 441 |
+
n_estimators=200,
|
| 442 |
+
max_depth=3,
|
| 443 |
+
min_samples_split=10,
|
| 444 |
+
min_samples_leaf=5,
|
| 445 |
+
max_features='sqrt',
|
| 446 |
random_state=42,
|
| 447 |
+
n_jobs=-1,
|
| 448 |
)
|
| 449 |
+
rf_model.fit(X_train, y_train, sample_weight=_sample_weights)
|
| 450 |
models.append(rf_model)
|
| 451 |
|
| 452 |
# Ensemble tahmin: ortalama
|
|
|
|
| 475 |
|
| 476 |
confidence_pct = _compute_confidence(r2, direction_correct)
|
| 477 |
|
| 478 |
+
# Son tahmin (latest bar — apply same feature selection)
|
| 479 |
last_features_raw = np.nan_to_num(X_all[-1].reshape(1, -1), nan=0.0, posinf=0.0, neginf=0.0)
|
| 480 |
+
last_features = scaler_X.transform(last_features_raw)[:, _top_feat_idx]
|
| 481 |
predictions_final = np.array([
|
| 482 |
float(np.asarray(m.predict(last_features), dtype=float).ravel()[0])
|
| 483 |
for m in models
|
| 484 |
], dtype=float)
|
| 485 |
predicted_change = float(predictions_final.mean()) # already % return
|
| 486 |
|
| 487 |
+
# Base shrinkage: models overfit, shrink 70% toward zero
|
| 488 |
+
predicted_change *= 0.30
|
| 489 |
+
|
| 490 |
# Bayesian shrinkage: dampen extreme predictions when confidence is low
|
| 491 |
predicted_change = _apply_shrinkage(predicted_change, confidence_pct, days_ahead)
|
| 492 |
predicted_price = current_price * (1 + predicted_change / 100)
|
|
|
|
| 514 |
return res
|
| 515 |
else:
|
| 516 |
# Fallback to GradientBoosting if no advanced models
|
| 517 |
+
model = GradientBoostingRegressor(
|
| 518 |
+
n_estimators=200, max_depth=3, learning_rate=0.03,
|
| 519 |
+
subsample=0.8, min_samples_split=10, random_state=42,
|
| 520 |
+
)
|
| 521 |
else:
|
| 522 |
# Default: GradientBoosting
|
| 523 |
model = GradientBoostingRegressor(
|
| 524 |
+
n_estimators=200,
|
| 525 |
+
max_depth=3,
|
| 526 |
+
learning_rate=0.03,
|
| 527 |
+
subsample=0.8,
|
| 528 |
+
min_samples_split=10,
|
| 529 |
+
random_state=42,
|
| 530 |
)
|
| 531 |
|
| 532 |
# Tek model durumunda eğitim
|
| 533 |
+
model.fit(X_train, y_train, sample_weight=_sample_weights)
|
| 534 |
|
| 535 |
# Test seti üzerinde performans (target is % return, no inverse transform)
|
| 536 |
y_pred_test = np.asarray(model.predict(X_test), dtype=float)
|
|
|
|
| 555 |
# Convert to a UI-friendly 0-100 confidence score
|
| 556 |
confidence_pct = _compute_confidence(r2, direction_correct)
|
| 557 |
|
| 558 |
+
# Tahmin yap (latest bar — apply same feature selection)
|
| 559 |
last_features_raw = np.nan_to_num(X_all[-1].reshape(1, -1), nan=0.0, posinf=0.0, neginf=0.0)
|
| 560 |
+
last_features = scaler_X.transform(last_features_raw)[:, _top_feat_idx]
|
| 561 |
predicted_change = float(np.asarray(model.predict(last_features), dtype=float).ravel()[0]) # already % return
|
| 562 |
|
| 563 |
+
# Base shrinkage: models overfit, shrink 70% toward zero
|
| 564 |
+
predicted_change *= 0.30
|
| 565 |
+
|
| 566 |
# Bayesian shrinkage: dampen extreme predictions when confidence is low
|
| 567 |
predicted_change = _apply_shrinkage(predicted_change, confidence_pct, days_ahead)
|
| 568 |
predicted_price = current_price * (1 + predicted_change / 100)
|
huggingface-space/analysis/walk_forward_backtest.py
CHANGED
|
@@ -297,6 +297,13 @@ def walk_forward_backtest(
|
|
| 297 |
df_feat = add_macro_features(df_feat)
|
| 298 |
df_feat["target_return"] = (df_feat["Close"].shift(-days_ahead) / df_feat["Close"] - 1) * 100.0
|
| 299 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 300 |
# Corporate action filter: poison target_return around suspected artifact days
|
| 301 |
# so the model never trains on contaminated bedelsiz/bedelli/temettu data.
|
| 302 |
_ca_suspect = flag_corp_action_days(df)
|
|
@@ -395,10 +402,13 @@ def walk_forward_backtest(
|
|
| 395 |
X_test_s = scaler.transform(np.nan_to_num(X_test, nan=0.0, posinf=0.0, neginf=0.0))
|
| 396 |
|
| 397 |
# --- Feature importance selection: train quick RF, keep top features ---
|
| 398 |
-
_sel_rf = RandomForestRegressor(
|
|
|
|
|
|
|
|
|
|
| 399 |
_sel_rf.fit(X_train_s, y_train)
|
| 400 |
importances = _sel_rf.feature_importances_
|
| 401 |
-
n_keep = min(
|
| 402 |
top_idx = np.argsort(importances)[-n_keep:]
|
| 403 |
X_train_s = X_train_s[:, top_idx]
|
| 404 |
X_test_s = X_test_s[:, top_idx]
|
|
@@ -413,10 +423,14 @@ def walk_forward_backtest(
|
|
| 413 |
|
| 414 |
# --- Ensemble of classifiers ---
|
| 415 |
clf_rf = RandomForestClassifier(
|
| 416 |
-
n_estimators=200, max_depth=
|
|
|
|
|
|
|
| 417 |
)
|
| 418 |
clf_gb = GradientBoostingClassifier(
|
| 419 |
-
n_estimators=
|
|
|
|
|
|
|
| 420 |
)
|
| 421 |
clf_rf.fit(X_train_s, y_train_cls, sample_weight=sample_weights)
|
| 422 |
clf_gb.fit(X_train_s, y_train_cls, sample_weight=sample_weights)
|
|
@@ -435,9 +449,17 @@ def walk_forward_backtest(
|
|
| 435 |
# Also train regression model for magnitude estimate
|
| 436 |
reg_model: Any
|
| 437 |
if str(model_type).lower() == "rf":
|
| 438 |
-
reg_model = RandomForestRegressor(
|
|
|
|
|
|
|
|
|
|
|
|
|
| 439 |
else:
|
| 440 |
-
reg_model = GradientBoostingRegressor(
|
|
|
|
|
|
|
|
|
|
|
|
|
| 441 |
reg_model.fit(X_train_s, y_train, sample_weight=sample_weights)
|
| 442 |
y_pred_reg = np.asarray(reg_model.predict(X_test_s), dtype=float)
|
| 443 |
r2 = float(r2_score(y_test, y_pred_reg))
|
|
@@ -457,6 +479,7 @@ def walk_forward_backtest(
|
|
| 457 |
X_pred_row = scaler.transform(np.nan_to_num(row_t[FEATURES].to_numpy(dtype=float).reshape(1, -1), nan=0.0, posinf=0.0, neginf=0.0))
|
| 458 |
X_pred_sel = X_pred_row[:, top_idx]
|
| 459 |
reg_pred = float(np.asarray(reg_model.predict(X_pred_sel), dtype=float).ravel()[0])
|
|
|
|
| 460 |
predicted_change = float(_apply_shrinkage(reg_pred, confidence_pct, days_ahead))
|
| 461 |
ml_signal = "HOLD" # Model not confident enough
|
| 462 |
_current_prob_up = 0.5
|
|
@@ -472,6 +495,7 @@ def walk_forward_backtest(
|
|
| 472 |
|
| 473 |
# Regression for magnitude
|
| 474 |
reg_pred = float(np.asarray(reg_model.predict(X_pred_sel), dtype=float).ravel()[0])
|
|
|
|
| 475 |
predicted_change = float(_apply_shrinkage(reg_pred, confidence_pct, days_ahead))
|
| 476 |
|
| 477 |
# Signal from classification probability — RAISED thresholds for higher conviction
|
|
|
|
| 297 |
df_feat = add_macro_features(df_feat)
|
| 298 |
df_feat["target_return"] = (df_feat["Close"].shift(-days_ahead) / df_feat["Close"] - 1) * 100.0
|
| 299 |
|
| 300 |
+
# Target clipping: cap extreme returns to prevent outlier-driven training
|
| 301 |
+
_target_clip = 3.5 * float(np.sqrt(max(1, days_ahead))) # ~9% for 7 days
|
| 302 |
+
_extreme_mask = df_feat["target_return"].abs() > _target_clip
|
| 303 |
+
df_feat.loc[_extreme_mask, "target_return"] = np.clip(
|
| 304 |
+
df_feat.loc[_extreme_mask, "target_return"], -_target_clip, _target_clip,
|
| 305 |
+
)
|
| 306 |
+
|
| 307 |
# Corporate action filter: poison target_return around suspected artifact days
|
| 308 |
# so the model never trains on contaminated bedelsiz/bedelli/temettu data.
|
| 309 |
_ca_suspect = flag_corp_action_days(df)
|
|
|
|
| 402 |
X_test_s = scaler.transform(np.nan_to_num(X_test, nan=0.0, posinf=0.0, neginf=0.0))
|
| 403 |
|
| 404 |
# --- Feature importance selection: train quick RF, keep top features ---
|
| 405 |
+
_sel_rf = RandomForestRegressor(
|
| 406 |
+
n_estimators=50, max_depth=4, min_samples_leaf=5,
|
| 407 |
+
max_features='sqrt', random_state=42, n_jobs=-1,
|
| 408 |
+
)
|
| 409 |
_sel_rf.fit(X_train_s, y_train)
|
| 410 |
importances = _sel_rf.feature_importances_
|
| 411 |
+
n_keep = min(10, len(FEATURES))
|
| 412 |
top_idx = np.argsort(importances)[-n_keep:]
|
| 413 |
X_train_s = X_train_s[:, top_idx]
|
| 414 |
X_test_s = X_test_s[:, top_idx]
|
|
|
|
| 423 |
|
| 424 |
# --- Ensemble of classifiers ---
|
| 425 |
clf_rf = RandomForestClassifier(
|
| 426 |
+
n_estimators=200, max_depth=3, min_samples_split=10,
|
| 427 |
+
min_samples_leaf=5, max_features='sqrt',
|
| 428 |
+
random_state=42, n_jobs=-1, class_weight="balanced",
|
| 429 |
)
|
| 430 |
clf_gb = GradientBoostingClassifier(
|
| 431 |
+
n_estimators=200, max_depth=3, learning_rate=0.03,
|
| 432 |
+
subsample=0.8, min_samples_split=10,
|
| 433 |
+
random_state=42,
|
| 434 |
)
|
| 435 |
clf_rf.fit(X_train_s, y_train_cls, sample_weight=sample_weights)
|
| 436 |
clf_gb.fit(X_train_s, y_train_cls, sample_weight=sample_weights)
|
|
|
|
| 449 |
# Also train regression model for magnitude estimate
|
| 450 |
reg_model: Any
|
| 451 |
if str(model_type).lower() == "rf":
|
| 452 |
+
reg_model = RandomForestRegressor(
|
| 453 |
+
n_estimators=200, max_depth=3, min_samples_split=10,
|
| 454 |
+
min_samples_leaf=5, max_features='sqrt',
|
| 455 |
+
random_state=42, n_jobs=-1,
|
| 456 |
+
)
|
| 457 |
else:
|
| 458 |
+
reg_model = GradientBoostingRegressor(
|
| 459 |
+
n_estimators=200, max_depth=3, learning_rate=0.03,
|
| 460 |
+
subsample=0.8, min_samples_split=10,
|
| 461 |
+
random_state=42,
|
| 462 |
+
)
|
| 463 |
reg_model.fit(X_train_s, y_train, sample_weight=sample_weights)
|
| 464 |
y_pred_reg = np.asarray(reg_model.predict(X_test_s), dtype=float)
|
| 465 |
r2 = float(r2_score(y_test, y_pred_reg))
|
|
|
|
| 479 |
X_pred_row = scaler.transform(np.nan_to_num(row_t[FEATURES].to_numpy(dtype=float).reshape(1, -1), nan=0.0, posinf=0.0, neginf=0.0))
|
| 480 |
X_pred_sel = X_pred_row[:, top_idx]
|
| 481 |
reg_pred = float(np.asarray(reg_model.predict(X_pred_sel), dtype=float).ravel()[0])
|
| 482 |
+
reg_pred *= 0.30 # Base shrinkage: 70% toward zero
|
| 483 |
predicted_change = float(_apply_shrinkage(reg_pred, confidence_pct, days_ahead))
|
| 484 |
ml_signal = "HOLD" # Model not confident enough
|
| 485 |
_current_prob_up = 0.5
|
|
|
|
| 495 |
|
| 496 |
# Regression for magnitude
|
| 497 |
reg_pred = float(np.asarray(reg_model.predict(X_pred_sel), dtype=float).ravel()[0])
|
| 498 |
+
reg_pred *= 0.30 # Base shrinkage: 70% toward zero
|
| 499 |
predicted_change = float(_apply_shrinkage(reg_pred, confidence_pct, days_ahead))
|
| 500 |
|
| 501 |
# Signal from classification probability — RAISED thresholds for higher conviction
|
huggingface-space/app.py
CHANGED
|
@@ -2,29 +2,18 @@
|
|
| 2 |
Hugging Face Space: Borsa ML API
|
| 3 |
FastAPI backend for stock analysis and ML predictions
|
| 4 |
"""
|
| 5 |
-
# --- HF Space DNS fix: api.telegram.org DNS çözümü engellenmiş ---
|
| 6 |
-
import socket as _socket
|
| 7 |
-
_orig_getaddrinfo = _socket.getaddrinfo
|
| 8 |
-
def _tg_dns_fix(host, port, family=0, type=0, proto=0, flags=0):
|
| 9 |
-
if host == "api.telegram.org":
|
| 10 |
-
return _orig_getaddrinfo("149.154.167.220", port, family, type, proto, flags)
|
| 11 |
-
return _orig_getaddrinfo(host, port, family, type, proto, flags)
|
| 12 |
-
_socket.getaddrinfo = _tg_dns_fix
|
| 13 |
-
|
| 14 |
from fastapi import FastAPI, HTTPException, Query
|
| 15 |
from fastapi.middleware.cors import CORSMiddleware
|
| 16 |
from pydantic import BaseModel
|
| 17 |
-
from typing import
|
| 18 |
import sys
|
| 19 |
import os
|
| 20 |
import json
|
| 21 |
-
from datetime import datetime
|
| 22 |
from pathlib import Path
|
| 23 |
|
| 24 |
import math
|
| 25 |
|
| 26 |
-
from trading.market_registry import DEFAULT_MARKET_ID, get_scan_results_path, get_trading_db_path
|
| 27 |
-
|
| 28 |
# Add parent directory to path for imports
|
| 29 |
sys.path.insert(0, os.path.dirname(__file__))
|
| 30 |
|
|
@@ -65,7 +54,6 @@ _scan_status = {"running": False, "progress": "", "started_at": None}
|
|
| 65 |
|
| 66 |
# ─── Remote sync cache (survives ephemeral filesystem resets) ─────────
|
| 67 |
_remote_cache: dict = {}
|
| 68 |
-
_SUPPORTED_MARKETS = ("bist", "us")
|
| 69 |
|
| 70 |
# ─── Daily Auto-Scan Scheduler ────────────────────────
|
| 71 |
# Runs a full-market scan every day at 19:00 Turkey time (16:00 UTC)
|
|
@@ -74,387 +62,211 @@ _SUPPORTED_MARKETS = ("bist", "us")
|
|
| 74 |
import threading
|
| 75 |
import time as _time
|
| 76 |
|
| 77 |
-
|
| 78 |
-
|
| 79 |
-
|
| 80 |
-
|
| 81 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 82 |
|
| 83 |
-
|
| 84 |
-
|
| 85 |
-
|
| 86 |
-
Timing rationale:
|
| 87 |
-
- BIST market closes at 18:00 TR
|
| 88 |
-
- Scan starts at 19:00 TR (after market close)
|
| 89 |
-
- BIST100 takes ~5-6 hours → finishes by ~01:00 TR
|
| 90 |
-
- Market opens at 10:00 TR → 9 hours margin
|
| 91 |
-
|
| 92 |
-
On first boot (no scan results), runs an immediate BIST30 scan so the
|
| 93 |
-
trading worker has data within minutes instead of waiting until evening.
|
| 94 |
-
"""
|
| 95 |
global _scan_thread, _scan_status
|
| 96 |
-
from datetime import timezone, timedelta
|
| 97 |
-
from pathlib import Path
|
| 98 |
|
| 99 |
-
|
| 100 |
-
|
| 101 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 102 |
|
| 103 |
-
|
| 104 |
-
|
| 105 |
-
if _SCAN_FILE.exists():
|
| 106 |
-
# Check if scan results are too old (> 3 days → stale, needs rescan)
|
| 107 |
-
try:
|
| 108 |
-
scan_data = json.loads(_SCAN_FILE.read_text())
|
| 109 |
-
finished = scan_data.get("scan_finished") or scan_data.get("updated_at")
|
| 110 |
-
if finished:
|
| 111 |
-
scan_dt = datetime.fromisoformat(str(finished).replace("Z", "+00:00"))
|
| 112 |
-
age_hours = (datetime.now(timezone(timedelta(hours=0))) - scan_dt).total_seconds() / 3600
|
| 113 |
-
if age_hours > 72: # 3 days
|
| 114 |
-
scan_stale = True
|
| 115 |
-
print(f"[scheduler] Scan results stale ({age_hours:.0f}h old) — forcing rescan")
|
| 116 |
-
else:
|
| 117 |
-
print(f"[scheduler] Scan results fresh ({age_hours:.0f}h old)")
|
| 118 |
-
else:
|
| 119 |
-
scan_stale = True
|
| 120 |
-
print("[scheduler] Scan results have no timestamp — forcing rescan")
|
| 121 |
-
except Exception as e:
|
| 122 |
-
scan_stale = True
|
| 123 |
-
print(f"[scheduler] Error reading scan file: {e} — forcing rescan")
|
| 124 |
-
|
| 125 |
-
if not _SCAN_FILE.exists() or scan_stale:
|
| 126 |
-
reason = "stale" if scan_stale else "missing"
|
| 127 |
-
# Boot scan: BIST30 kullan (hızlı ~5dk). Gece taraması BIST100 yapar.
|
| 128 |
-
boot_universe = "bist30"
|
| 129 |
-
print(f"[scheduler] Scan results {reason} — running immediate {boot_universe} boot scan...")
|
| 130 |
-
_scan_status = {
|
| 131 |
-
"running": True,
|
| 132 |
-
"progress": f"Boot taraması başlatılıyor ({boot_universe}, {reason})...",
|
| 133 |
-
"started_at": datetime.now().isoformat(),
|
| 134 |
-
"universe": boot_universe,
|
| 135 |
-
"market": "bist",
|
| 136 |
-
"scheduled": True,
|
| 137 |
-
}
|
| 138 |
try:
|
| 139 |
-
|
| 140 |
-
|
| 141 |
-
|
| 142 |
-
|
| 143 |
-
|
| 144 |
-
pass
|
| 145 |
-
|
| 146 |
-
from run_bist100_scan import run_scan
|
| 147 |
-
_scan_status["progress"] = f"Stage 1: Likidite filtresi ({boot_universe})..."
|
| 148 |
-
run_scan(universe=boot_universe, force=False, stage1_only=False)
|
| 149 |
_scan_status = {
|
| 150 |
"running": False,
|
| 151 |
-
"progress": f"
|
| 152 |
"finished_at": datetime.now().isoformat(),
|
| 153 |
-
"universe":
|
| 154 |
-
"
|
| 155 |
"scheduled": True,
|
| 156 |
}
|
| 157 |
-
print(f"[scheduler]
|
| 158 |
-
|
| 159 |
-
# Telegram: tarama bitti bildirimi
|
| 160 |
-
try:
|
| 161 |
-
import telegram_bot as _tg_bot
|
| 162 |
-
if _SCAN_FILE.exists():
|
| 163 |
-
_tg_bot.notify_scan_complete(json.loads(_SCAN_FILE.read_text()))
|
| 164 |
-
except Exception:
|
| 165 |
-
pass
|
| 166 |
-
|
| 167 |
except Exception as e:
|
| 168 |
_scan_status = {
|
| 169 |
"running": False,
|
| 170 |
-
"progress": f"
|
| 171 |
"error": str(e),
|
| 172 |
-
"
|
| 173 |
-
|
| 174 |
-
print(f"[scheduler] Boot scan error: {e}")
|
| 175 |
-
# Telegram: hata bildirimi
|
| 176 |
-
try:
|
| 177 |
-
import telegram_bot as _tg_bot
|
| 178 |
-
_tg_bot.notify_error("Boot Tarama Hatası", str(e))
|
| 179 |
-
except Exception:
|
| 180 |
-
pass
|
| 181 |
-
|
| 182 |
-
# ── Nightly recurring scan loop (19:00 TR) ────────────────────────
|
| 183 |
-
while True:
|
| 184 |
-
try:
|
| 185 |
-
now_tr = datetime.now(TR_TZ)
|
| 186 |
-
|
| 187 |
-
# Calculate next 19:00 TR
|
| 188 |
-
target = now_tr.replace(hour=SCAN_HOUR_TR, minute=0, second=0, microsecond=0)
|
| 189 |
-
if now_tr >= target:
|
| 190 |
-
# Already past 19:00 today → schedule for tomorrow 19:00
|
| 191 |
-
target = target + timedelta(days=1)
|
| 192 |
-
|
| 193 |
-
wait_seconds = (target - now_tr).total_seconds()
|
| 194 |
-
print(f"[scheduler] Next auto-scan at {target.isoformat()} (in {wait_seconds/3600:.1f}h)")
|
| 195 |
-
_time.sleep(wait_seconds)
|
| 196 |
-
|
| 197 |
-
# Check if scan already running
|
| 198 |
-
if _scan_status.get("running"):
|
| 199 |
-
print("[scheduler] Scan already running, skipping scheduled scan")
|
| 200 |
-
continue
|
| 201 |
-
|
| 202 |
-
print("[scheduler] Starting daily ALL auto-scan...")
|
| 203 |
-
_scan_status = {
|
| 204 |
-
"running": True,
|
| 205 |
-
"progress": "Günlük otomatik tarama başlatılıyor (ALL)...",
|
| 206 |
-
"started_at": datetime.now().isoformat(),
|
| 207 |
-
"universe": "all",
|
| 208 |
-
"market": "bist",
|
| 209 |
"scheduled": True,
|
| 210 |
}
|
|
|
|
| 211 |
|
| 212 |
-
|
| 213 |
-
|
| 214 |
-
|
| 215 |
-
# Telegram: tarama başladı
|
| 216 |
-
try:
|
| 217 |
-
import telegram_bot as _tg_bot
|
| 218 |
-
_tg_bot.notify_scan_started()
|
| 219 |
-
except Exception:
|
| 220 |
-
pass
|
| 221 |
-
|
| 222 |
-
from run_bist100_scan import run_scan
|
| 223 |
-
_scan_status["progress"] = "Stage 1: Likidite filtresi..."
|
| 224 |
-
run_scan(universe="all", force=False, stage1_only=False)
|
| 225 |
-
_scan_status = {
|
| 226 |
-
"running": False,
|
| 227 |
-
"progress": "Günlük tarama tamamlandı!",
|
| 228 |
-
"finished_at": datetime.now().isoformat(),
|
| 229 |
-
"universe": "all",
|
| 230 |
-
"market": "bist",
|
| 231 |
-
"scheduled": True,
|
| 232 |
-
}
|
| 233 |
-
print(f"[scheduler] Daily scan completed at {datetime.now().isoformat()}")
|
| 234 |
-
|
| 235 |
-
# Telegram: tarama bitti
|
| 236 |
-
try:
|
| 237 |
-
import telegram_bot as _tg_bot
|
| 238 |
-
if _SCAN_FILE.exists():
|
| 239 |
-
_tg_bot.notify_scan_complete(json.loads(_SCAN_FILE.read_text()))
|
| 240 |
-
except Exception:
|
| 241 |
-
pass
|
| 242 |
|
| 243 |
-
except Exception as e:
|
| 244 |
-
_scan_status = {
|
| 245 |
-
"running": False,
|
| 246 |
-
"progress": f"Günlük tarama hatası: {e}",
|
| 247 |
-
"error": str(e),
|
| 248 |
-
"universe": "all",
|
| 249 |
-
"market": "bist",
|
| 250 |
-
"scheduled": True,
|
| 251 |
-
}
|
| 252 |
-
print(f"[scheduler] Daily scan error: {e}")
|
| 253 |
-
# Telegram: hata
|
| 254 |
-
try:
|
| 255 |
-
import telegram_bot as _tg_bot
|
| 256 |
-
_tg_bot.notify_error("Günlük Tarama Hatası", str(e))
|
| 257 |
-
except Exception:
|
| 258 |
-
pass
|
| 259 |
|
| 260 |
-
|
| 261 |
-
|
| 262 |
|
| 263 |
-
|
| 264 |
-
|
| 265 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 266 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 267 |
|
| 268 |
-
|
| 269 |
-
|
|
|
|
| 270 |
|
| 271 |
-
|
| 272 |
-
|
| 273 |
-
|
| 274 |
-
|
| 275 |
-
- Results ready well before next 09:30 ET market open
|
| 276 |
|
| 277 |
-
|
| 278 |
-
|
| 279 |
-
"""
|
| 280 |
-
global _scan_thread, _scan_status
|
| 281 |
-
from datetime import timedelta
|
| 282 |
-
from pathlib import Path
|
| 283 |
-
from zoneinfo import ZoneInfo
|
| 284 |
-
|
| 285 |
-
ET_TZ = ZoneInfo("America/New_York")
|
| 286 |
-
_US_SCAN_FILE = Path("paper_trading/markets/us/scan_results.json")
|
| 287 |
-
US_SCAN_HOUR_ET = 17
|
| 288 |
-
BOOT_UNIVERSE = "sp100"
|
| 289 |
-
|
| 290 |
-
# ── Initial boot scan ──────────────────────────────────────────────
|
| 291 |
-
scan_stale = False
|
| 292 |
-
if _US_SCAN_FILE.exists():
|
| 293 |
-
try:
|
| 294 |
-
scan_data = json.loads(_US_SCAN_FILE.read_text())
|
| 295 |
-
finished = scan_data.get("scan_finished") or scan_data.get("updated_at")
|
| 296 |
-
if finished:
|
| 297 |
-
scan_dt = datetime.fromisoformat(str(finished).replace("Z", "+00:00"))
|
| 298 |
-
age_hours = (datetime.now(timezone.utc) - scan_dt).total_seconds() / 3600
|
| 299 |
-
if age_hours > 72:
|
| 300 |
-
scan_stale = True
|
| 301 |
-
print(f"[us-scheduler] US scan results stale ({age_hours:.0f}h old) — forcing rescan")
|
| 302 |
-
else:
|
| 303 |
-
print(f"[us-scheduler] US scan results fresh ({age_hours:.0f}h old)")
|
| 304 |
-
else:
|
| 305 |
-
scan_stale = True
|
| 306 |
-
print("[us-scheduler] US scan results have no timestamp — forcing rescan")
|
| 307 |
-
except Exception as e:
|
| 308 |
-
scan_stale = True
|
| 309 |
-
print(f"[us-scheduler] Error reading US scan file: {e} — forcing rescan")
|
| 310 |
-
|
| 311 |
-
if not _US_SCAN_FILE.exists() or scan_stale:
|
| 312 |
-
reason = "stale" if scan_stale else "missing"
|
| 313 |
-
print(f"[us-scheduler] US scan results {reason} — running immediate {BOOT_UNIVERSE} boot scan...")
|
| 314 |
-
_scan_status = {
|
| 315 |
-
"running": True,
|
| 316 |
-
"progress": f"US boot taraması başlatılıyor ({BOOT_UNIVERSE}, {reason})...",
|
| 317 |
-
"started_at": datetime.now().isoformat(),
|
| 318 |
-
"universe": BOOT_UNIVERSE,
|
| 319 |
-
"market": "us",
|
| 320 |
-
"scheduled": True,
|
| 321 |
-
}
|
| 322 |
-
try:
|
| 323 |
-
from run_us_scan import run_scan as _us_run_scan
|
| 324 |
-
_scan_status["progress"] = f"US Stage 1: Liquidity filter ({BOOT_UNIVERSE})..."
|
| 325 |
-
_us_run_scan(universe=BOOT_UNIVERSE, force=False, stage1_only=False)
|
| 326 |
-
_scan_status = {
|
| 327 |
-
"running": False,
|
| 328 |
-
"progress": f"US boot taraması tamamlandı ({BOOT_UNIVERSE})!",
|
| 329 |
-
"finished_at": datetime.now().isoformat(),
|
| 330 |
-
"universe": BOOT_UNIVERSE,
|
| 331 |
-
"market": "us",
|
| 332 |
-
"scheduled": True,
|
| 333 |
-
}
|
| 334 |
-
print(f"[us-scheduler] US boot scan ({BOOT_UNIVERSE}) completed at {datetime.now().isoformat()}")
|
| 335 |
-
except Exception as e:
|
| 336 |
-
_scan_status = {
|
| 337 |
-
"running": False,
|
| 338 |
-
"progress": f"US boot taraması hatası: {e}",
|
| 339 |
-
"error": str(e),
|
| 340 |
-
"market": "us",
|
| 341 |
-
}
|
| 342 |
-
print(f"[us-scheduler] US boot scan error: {e}")
|
| 343 |
|
| 344 |
-
# ── Nightly recurring scan loop
|
| 345 |
while True:
|
| 346 |
try:
|
| 347 |
-
|
| 348 |
-
target = now_et.replace(hour=US_SCAN_HOUR_ET, minute=0, second=0, microsecond=0)
|
| 349 |
-
if now_et >= target:
|
| 350 |
-
target = target + timedelta(days=1)
|
| 351 |
-
|
| 352 |
-
wait_seconds = (target - now_et).total_seconds()
|
| 353 |
-
print(f"[us-scheduler] Next US auto-scan at {target.isoformat()} (in {wait_seconds/3600:.1f}h)")
|
| 354 |
-
_time.sleep(wait_seconds)
|
| 355 |
-
|
| 356 |
-
if _scan_status.get("running"):
|
| 357 |
-
print("[us-scheduler] Scan already running, skipping scheduled US scan")
|
| 358 |
-
continue
|
| 359 |
-
|
| 360 |
-
print("[us-scheduler] Starting daily US SP100 auto-scan...")
|
| 361 |
-
_scan_status = {
|
| 362 |
-
"running": True,
|
| 363 |
-
"progress": "US günlük otomatik tarama başlatılıyor (SP100)...",
|
| 364 |
-
"started_at": datetime.now().isoformat(),
|
| 365 |
-
"universe": "sp100",
|
| 366 |
-
"market": "us",
|
| 367 |
-
"scheduled": True,
|
| 368 |
-
}
|
| 369 |
|
| 370 |
-
|
| 371 |
-
|
| 372 |
-
|
| 373 |
-
|
| 374 |
-
|
| 375 |
-
|
| 376 |
-
|
| 377 |
-
|
| 378 |
-
|
| 379 |
-
|
| 380 |
-
|
| 381 |
-
|
| 382 |
-
|
| 383 |
-
|
| 384 |
-
|
| 385 |
-
|
| 386 |
-
|
| 387 |
-
|
| 388 |
-
|
| 389 |
-
"error": str(e),
|
| 390 |
-
"universe": "sp100",
|
| 391 |
-
"market": "us",
|
| 392 |
-
"scheduled": True,
|
| 393 |
-
}
|
| 394 |
-
print(f"[us-scheduler] Daily US scan error: {e}")
|
| 395 |
|
| 396 |
-
|
| 397 |
-
|
|
|
|
| 398 |
|
|
|
|
| 399 |
except Exception as e:
|
| 400 |
-
print(f"[
|
| 401 |
_time.sleep(3600)
|
| 402 |
|
| 403 |
-
|
| 404 |
-
|
| 405 |
-
|
| 406 |
-
|
| 407 |
-
Importing this module should stay side-effect free so smoke tests and helper
|
| 408 |
-
invocations do not unexpectedly launch scans or watchdog processes.
|
| 409 |
-
"""
|
| 410 |
-
global _scheduler_thread, _us_scheduler_thread, _watchdog_thread, _background_services_started
|
| 411 |
-
|
| 412 |
-
with _background_services_lock:
|
| 413 |
-
if _background_services_started:
|
| 414 |
-
return
|
| 415 |
-
|
| 416 |
-
if _scheduler_thread is None or not _scheduler_thread.is_alive():
|
| 417 |
-
_scheduler_thread = threading.Thread(target=_daily_scan_scheduler, daemon=True, name="scan-scheduler")
|
| 418 |
-
_scheduler_thread.start()
|
| 419 |
-
print("[scheduler] Daily auto-scan scheduler started (19:00 TR / 16:00 UTC — ALL)")
|
| 420 |
-
|
| 421 |
-
if _us_scheduler_thread is None or not _us_scheduler_thread.is_alive():
|
| 422 |
-
_us_scheduler_thread = threading.Thread(target=_daily_us_scan_scheduler, daemon=True, name="us-scan-scheduler")
|
| 423 |
-
_us_scheduler_thread.start()
|
| 424 |
-
print("[scheduler] US daily scan scheduler started (17:00 ET — SP100)")
|
| 425 |
-
|
| 426 |
-
if _watchdog_thread is None or not _watchdog_thread.is_alive():
|
| 427 |
-
_watchdog_thread = threading.Thread(target=_worker_watchdog, daemon=True, name="worker-watchdog")
|
| 428 |
-
_watchdog_thread.start()
|
| 429 |
-
print("[watchdog] Worker health watchdog started (checks every 5 min)")
|
| 430 |
-
|
| 431 |
-
_background_services_started = True
|
| 432 |
-
|
| 433 |
-
|
| 434 |
-
@app.on_event("startup")
|
| 435 |
-
def _startup_background_services() -> None:
|
| 436 |
-
_ensure_background_services_started()
|
| 437 |
|
| 438 |
|
| 439 |
def _get_trading_status():
|
| 440 |
"""Read trading status from file-based state."""
|
| 441 |
paper_dir = Path("paper_trading")
|
| 442 |
-
|
| 443 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 444 |
try:
|
| 445 |
-
return json.loads(
|
| 446 |
except Exception:
|
| 447 |
return None
|
| 448 |
|
| 449 |
-
def read_jsonl(
|
| 450 |
try:
|
| 451 |
-
if not
|
| 452 |
return []
|
| 453 |
-
return [json.loads(line) for line in
|
| 454 |
except Exception:
|
| 455 |
return []
|
| 456 |
|
| 457 |
-
def check_pid(
|
| 458 |
try:
|
| 459 |
if not pid_file.exists():
|
| 460 |
return False, None
|
|
@@ -464,233 +276,188 @@ def _get_trading_status():
|
|
| 464 |
except Exception:
|
| 465 |
return False, None
|
| 466 |
|
| 467 |
-
|
| 468 |
-
|
| 469 |
-
|
| 470 |
-
|
| 471 |
-
|
| 472 |
-
|
| 473 |
-
|
| 474 |
-
|
| 475 |
-
|
| 476 |
-
|
| 477 |
-
|
| 478 |
-
|
| 479 |
-
|
| 480 |
-
|
| 481 |
-
|
| 482 |
-
"scan": get_scan_results_path(market_key, completed=True),
|
| 483 |
-
"pid": auto_dir / "worker.pid",
|
| 484 |
-
"db": get_trading_db_path(market_key),
|
| 485 |
-
}
|
| 486 |
-
|
| 487 |
-
def market_cache(market_id: str) -> Dict[str, Any]:
|
| 488 |
-
markets = _remote_cache.get("markets") or {}
|
| 489 |
-
cached = markets.get(market_id)
|
| 490 |
-
return cached if isinstance(cached, dict) else {}
|
| 491 |
-
|
| 492 |
-
def build_market_snapshot(market_id: str) -> Dict[str, Any]:
|
| 493 |
-
paths = market_paths(market_id)
|
| 494 |
-
cache = market_cache(market_id)
|
| 495 |
-
|
| 496 |
-
state = cache.get("state") or read_json(paths["state"])
|
| 497 |
-
status = cache.get("status") or read_json(paths["status"])
|
| 498 |
-
trades = cache.get("trades") or read_jsonl(paths["trades"])
|
| 499 |
-
equity = cache.get("equity_curve") or read_jsonl(paths["equity"])
|
| 500 |
-
signals = cache.get("signals") or read_jsonl(paths["signals"])
|
| 501 |
-
scan_data = cache.get("scan_results") or read_json(paths["scan"])
|
| 502 |
-
kill_switch = read_json(paths["kill"])
|
| 503 |
-
worker_running, worker_pid = check_pid(paths["pid"])
|
| 504 |
-
|
| 505 |
-
if paths["db"].exists() and ((not trades) or (not equity) or (not signals)):
|
| 506 |
-
try:
|
| 507 |
-
from trading.db_store import TradingStore
|
| 508 |
-
|
| 509 |
-
store = TradingStore(db_path=str(paths["db"]))
|
| 510 |
-
if not trades:
|
| 511 |
-
trades = store.get_all_trades()
|
| 512 |
-
if not equity:
|
| 513 |
-
equity = store.get_equity_curve()
|
| 514 |
-
if not signals:
|
| 515 |
-
signals = store.get_signals(limit=500)
|
| 516 |
-
except Exception as exc:
|
| 517 |
-
status = status or {}
|
| 518 |
-
status["db_fallback_error"] = str(exc)
|
| 519 |
-
|
| 520 |
-
eligible = []
|
| 521 |
try:
|
| 522 |
-
|
| 523 |
-
|
| 524 |
-
|
| 525 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 526 |
|
| 527 |
-
|
| 528 |
-
|
| 529 |
-
|
| 530 |
-
|
| 531 |
-
|
| 532 |
-
|
| 533 |
-
|
| 534 |
-
|
| 535 |
-
|
| 536 |
-
|
| 537 |
-
|
| 538 |
-
|
| 539 |
-
|
| 540 |
-
|
| 541 |
-
|
| 542 |
-
|
| 543 |
-
|
| 544 |
-
|
| 545 |
-
|
| 546 |
-
|
| 547 |
-
|
| 548 |
-
|
| 549 |
-
|
| 550 |
-
|
| 551 |
-
|
| 552 |
-
|
| 553 |
-
|
| 554 |
-
|
| 555 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 556 |
if not last_worker_status and isinstance(last_result, dict):
|
| 557 |
last_worker_status = last_result.get("status")
|
| 558 |
if not last_worker_reason and isinstance(last_result, dict):
|
| 559 |
last_worker_reason = last_result.get("reason")
|
| 560 |
|
| 561 |
-
|
| 562 |
-
|
| 563 |
-
|
| 564 |
-
|
| 565 |
-
|
| 566 |
-
"
|
| 567 |
-
"
|
| 568 |
-
"
|
| 569 |
-
"
|
| 570 |
-
|
| 571 |
-
|
| 572 |
-
|
| 573 |
-
|
| 574 |
-
|
| 575 |
-
|
| 576 |
-
|
| 577 |
-
|
| 578 |
-
|
| 579 |
-
|
| 580 |
-
|
| 581 |
-
"lastResult": last_result,
|
| 582 |
-
},
|
| 583 |
-
"portfolio": {
|
| 584 |
-
"cash": round(cash, 2),
|
| 585 |
-
"equity": equity_value,
|
| 586 |
-
"positionCount": len(positions),
|
| 587 |
-
"positions": [
|
| 588 |
-
{
|
| 589 |
-
"symbol": sym,
|
| 590 |
-
"quantity": pos.get("qty"),
|
| 591 |
-
"avgCost": pos.get("avg_cost"),
|
| 592 |
-
"entryDate": pos.get("entry_date"),
|
| 593 |
-
"notional": (pos.get("qty", 0)) * (pos.get("avg_cost", 0)),
|
| 594 |
-
}
|
| 595 |
-
for sym, pos in positions.items()
|
| 596 |
-
],
|
| 597 |
-
"pnlPct": round(((equity_value / float((state or {}).get("initial_cash", 100000) or 100000) - 1) * 100), 2) if equity_value else 0,
|
| 598 |
-
"unrealizedPnl": latest_eq.get("unrealized_pnl", 0) if latest_eq else 0,
|
| 599 |
-
},
|
| 600 |
-
"equityCurve": [
|
| 601 |
-
{
|
| 602 |
-
"date": item.get("date"),
|
| 603 |
-
"equity": item.get("equity"),
|
| 604 |
-
"cash": item.get("cash"),
|
| 605 |
-
"positions": item.get("positions_count"),
|
| 606 |
-
"realizedPnl": item.get("realized_pnl_today", 0),
|
| 607 |
-
}
|
| 608 |
-
for item in equity
|
| 609 |
-
],
|
| 610 |
-
"openTrades": [
|
| 611 |
{
|
| 612 |
-
"symbol":
|
| 613 |
-
"quantity":
|
| 614 |
-
"
|
| 615 |
-
"entryDate":
|
| 616 |
-
"
|
| 617 |
-
"predictedReturn": trade.get("predicted_return"),
|
| 618 |
-
"commission": trade.get("entry_commission"),
|
| 619 |
}
|
| 620 |
-
for
|
| 621 |
],
|
| 622 |
-
"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 623 |
{
|
| 624 |
-
"symbol":
|
| 625 |
-
"
|
| 626 |
-
"
|
| 627 |
-
"
|
| 628 |
-
"entryDate": trade.get("entry_date"),
|
| 629 |
-
"exitDate": trade.get("exit_date"),
|
| 630 |
-
"netPnl": trade.get("net_pnl"),
|
| 631 |
-
"returnPct": trade.get("return_pct"),
|
| 632 |
-
"holdingDays": trade.get("holding_days"),
|
| 633 |
-
"exitReason": trade.get("exit_reason"),
|
| 634 |
}
|
| 635 |
-
for
|
| 636 |
],
|
| 637 |
-
|
| 638 |
-
|
| 639 |
-
|
| 640 |
-
"
|
| 641 |
-
"
|
| 642 |
-
"
|
| 643 |
-
"
|
| 644 |
-
"
|
| 645 |
-
"
|
| 646 |
-
"
|
| 647 |
-
"
|
| 648 |
-
|
| 649 |
-
|
| 650 |
-
|
| 651 |
-
|
| 652 |
-
"winRate": round((stats["wins"] / stats["trades"]) * 100) if stats["trades"] > 0 else 0,
|
| 653 |
-
}
|
| 654 |
-
for sym, stats in symbol_stats.items()
|
| 655 |
-
],
|
| 656 |
-
},
|
| 657 |
-
"signals": [
|
| 658 |
-
{
|
| 659 |
-
"date": signal.get("date"),
|
| 660 |
-
"symbol": signal.get("symbol"),
|
| 661 |
-
"signal": signal.get("signal"),
|
| 662 |
-
"mlSignal": signal.get("ml_signal"),
|
| 663 |
-
"techSignal": signal.get("tech_signal"),
|
| 664 |
-
"confidence": signal.get("confidence"),
|
| 665 |
-
"predictedReturn": signal.get("predicted_return"),
|
| 666 |
-
"actionTaken": signal.get("action_taken"),
|
| 667 |
-
}
|
| 668 |
-
for signal in signals[-50:]
|
| 669 |
-
][::-1],
|
| 670 |
-
"eligibleStocks": eligible,
|
| 671 |
-
"scan": {
|
| 672 |
-
"universe": (scan_data or {}).get("universe"),
|
| 673 |
-
"completed": bool((scan_data or {}).get("completed", False)),
|
| 674 |
-
"stage1Count": len((scan_data or {}).get("stage1", {})),
|
| 675 |
-
"stage2Count": len((scan_data or {}).get("stage2", {})),
|
| 676 |
-
"updatedAt": (scan_data or {}).get("updated_at"),
|
| 677 |
-
},
|
| 678 |
-
}
|
| 679 |
-
|
| 680 |
-
markets = {market_id: build_market_snapshot(market_id) for market_id in _SUPPORTED_MARKETS}
|
| 681 |
-
default_market = markets[DEFAULT_MARKET_ID]
|
| 682 |
-
|
| 683 |
-
return {
|
| 684 |
-
"status": default_market["status"],
|
| 685 |
-
"portfolio": default_market["portfolio"],
|
| 686 |
-
"equityCurve": default_market["equityCurve"],
|
| 687 |
-
"openTrades": default_market["openTrades"],
|
| 688 |
-
"closedTrades": default_market["closedTrades"],
|
| 689 |
-
"performance": default_market["performance"],
|
| 690 |
-
"signals": default_market["signals"],
|
| 691 |
-
"eligibleStocks": default_market["eligibleStocks"],
|
| 692 |
-
"markets": markets,
|
| 693 |
-
"activeMarketId": DEFAULT_MARKET_ID,
|
| 694 |
"timestamp": datetime.now().isoformat(),
|
| 695 |
}
|
| 696 |
|
|
@@ -712,7 +479,6 @@ class MLPredictionRequest(BaseModel):
|
|
| 712 |
symbols: List[str]
|
| 713 |
days_ahead: int = 7
|
| 714 |
model: str = "ensemble" # ensemble, xgboost, lightgbm, rf
|
| 715 |
-
market: str = DEFAULT_MARKET_ID
|
| 716 |
|
| 717 |
|
| 718 |
@app.get("/")
|
|
@@ -803,7 +569,6 @@ def get_ml_predictions(request: MLPredictionRequest):
|
|
| 803 |
symbols=request.symbols,
|
| 804 |
days_ahead=request.days_ahead,
|
| 805 |
model_type=request.model,
|
| 806 |
-
market_id=request.market,
|
| 807 |
)
|
| 808 |
|
| 809 |
# If data providers rate-limit / return empty, avoid 500 and let UI degrade gracefully.
|
|
@@ -818,7 +583,6 @@ def get_ml_predictions(request: MLPredictionRequest):
|
|
| 818 |
return {
|
| 819 |
"model": request.model,
|
| 820 |
"days_ahead": request.days_ahead,
|
| 821 |
-
"market": request.market,
|
| 822 |
"predictions": results
|
| 823 |
}
|
| 824 |
except HTTPException:
|
|
@@ -1409,12 +1173,9 @@ def get_news(
|
|
| 1409 |
# If dead, restart it automatically. This survives HF Space container recycles.
|
| 1410 |
|
| 1411 |
def _worker_watchdog():
|
| 1412 |
-
"""Background thread: checks
|
| 1413 |
import subprocess
|
| 1414 |
-
|
| 1415 |
-
"bist": Path("paper_trading/auto_trader/worker.pid"),
|
| 1416 |
-
"us": Path("paper_trading/markets/us/auto_trader/worker.pid"),
|
| 1417 |
-
}
|
| 1418 |
CHECK_INTERVAL = 300 # 5 minutes
|
| 1419 |
|
| 1420 |
# Give the worker (started by start.sh) time to boot
|
|
@@ -1422,40 +1183,45 @@ def _worker_watchdog():
|
|
| 1422 |
|
| 1423 |
while True:
|
| 1424 |
try:
|
| 1425 |
-
|
| 1426 |
-
|
| 1427 |
-
|
| 1428 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1429 |
try:
|
| 1430 |
-
|
| 1431 |
-
|
| 1432 |
-
|
| 1433 |
-
|
| 1434 |
-
|
| 1435 |
-
|
| 1436 |
-
|
| 1437 |
-
|
| 1438 |
-
|
| 1439 |
-
|
| 1440 |
-
|
| 1441 |
-
subprocess.
|
| 1442 |
-
|
| 1443 |
-
|
| 1444 |
-
|
| 1445 |
-
|
| 1446 |
-
|
| 1447 |
-
|
| 1448 |
-
|
| 1449 |
-
if pid_file.exists():
|
| 1450 |
-
print(f"[watchdog] {market} worker restarted (PID={pid_file.read_text().strip()})")
|
| 1451 |
-
else:
|
| 1452 |
-
print(f"[watchdog] {market} worker restart attempted but no PID file yet")
|
| 1453 |
except Exception as e:
|
| 1454 |
print(f"[watchdog] Error: {e}")
|
| 1455 |
|
| 1456 |
_time.sleep(CHECK_INTERVAL)
|
| 1457 |
|
| 1458 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1459 |
# ─── Trading API Endpoints ──────────────────────────────────────────────────
|
| 1460 |
|
| 1461 |
|
|
@@ -1479,17 +1245,14 @@ def get_trading_data():
|
|
| 1479 |
@app.post("/api/trading")
|
| 1480 |
def trading_action(req: TradingActionRequest):
|
| 1481 |
"""Execute trading actions: run, start_worker, stop_worker, kill, unkill."""
|
| 1482 |
-
market_id = (req.market or "bist").strip().lower()
|
| 1483 |
paper_dir = Path("paper_trading")
|
| 1484 |
-
|
| 1485 |
-
|
| 1486 |
-
kill_file = paper_dir / "kill_switch.json" if market_id == "bist" else market_dir / ".kill_switch"
|
| 1487 |
-
pid_file = auto_trader_dir / "worker.pid"
|
| 1488 |
|
| 1489 |
if req.action == "run":
|
| 1490 |
try:
|
| 1491 |
from trading.worker import TradingWorker
|
| 1492 |
-
worker = TradingWorker(mode="paper"
|
| 1493 |
result = worker.run_cycle(force=False)
|
| 1494 |
return {"success": True, "output": str(result), "result": result}
|
| 1495 |
except Exception as e:
|
|
@@ -1498,7 +1261,7 @@ def trading_action(req: TradingActionRequest):
|
|
| 1498 |
if req.action == "run_force":
|
| 1499 |
try:
|
| 1500 |
from trading.worker import TradingWorker
|
| 1501 |
-
worker = TradingWorker(mode="paper"
|
| 1502 |
result = worker.run_cycle(force=True)
|
| 1503 |
return {"success": True, "output": str(result), "result": result}
|
| 1504 |
except Exception as e:
|
|
@@ -1524,7 +1287,7 @@ def trading_action(req: TradingActionRequest):
|
|
| 1524 |
|
| 1525 |
cwd = str(Path(__file__).parent)
|
| 1526 |
proc = subprocess.Popen(
|
| 1527 |
-
[sys.executable, "-m", "trading.worker", "--daemon", "--mode", "paper", "--
|
| 1528 |
cwd=cwd,
|
| 1529 |
stdout=subprocess.DEVNULL,
|
| 1530 |
stderr=subprocess.DEVNULL,
|
|
@@ -1566,94 +1329,76 @@ def trading_action(req: TradingActionRequest):
|
|
| 1566 |
return {"error": f"Worker durdurma hatası: {str(e)}"}
|
| 1567 |
|
| 1568 |
elif req.action == "kill":
|
| 1569 |
-
|
| 1570 |
-
|
| 1571 |
kill_file.write_text(json.dumps({
|
| 1572 |
"active": True,
|
| 1573 |
"reason": req.reason or "Manual kill switch from UI",
|
| 1574 |
"activated_at": datetime.now().isoformat(),
|
| 1575 |
-
"market_id": market_id,
|
| 1576 |
}))
|
| 1577 |
-
return {"success": True, "action": "kill", "
|
| 1578 |
|
| 1579 |
elif req.action == "unkill":
|
| 1580 |
try:
|
| 1581 |
kill_file.unlink(missing_ok=True)
|
| 1582 |
except Exception:
|
| 1583 |
pass
|
| 1584 |
-
return {"success": True, "action": "unkill", "
|
| 1585 |
|
| 1586 |
elif req.action == "scan":
|
| 1587 |
-
# Trigger
|
| 1588 |
global _scan_thread, _scan_status
|
| 1589 |
import threading
|
| 1590 |
|
| 1591 |
if _scan_status.get("running"):
|
| 1592 |
return {"success": False, "error": "Tarama zaten çalışıyor", "scanStatus": _scan_status}
|
| 1593 |
|
| 1594 |
-
#
|
| 1595 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1596 |
force = bool(req.force) if req.force is not None else False
|
|
|
|
| 1597 |
|
| 1598 |
def _run_scan():
|
| 1599 |
global _scan_status
|
| 1600 |
try:
|
| 1601 |
-
scan_label = universe.upper()
|
| 1602 |
_scan_status = {
|
| 1603 |
"running": True,
|
| 1604 |
-
"progress": f"{
|
| 1605 |
"started_at": datetime.now().isoformat(),
|
| 1606 |
"universe": universe,
|
| 1607 |
-
"
|
| 1608 |
}
|
| 1609 |
-
|
| 1610 |
-
|
| 1611 |
-
|
| 1612 |
-
|
| 1613 |
-
|
| 1614 |
-
|
| 1615 |
-
|
| 1616 |
-
if market_id == "us":
|
| 1617 |
-
from run_us_scan import run_scan
|
| 1618 |
-
else:
|
| 1619 |
-
from run_bist100_scan import run_scan
|
| 1620 |
-
_scan_status["progress"] = "Stage 1: Likidite filtresi..."
|
| 1621 |
-
run_scan(universe=universe, force=force, stage1_only=False)
|
| 1622 |
_scan_status = {
|
| 1623 |
"running": False,
|
| 1624 |
-
"progress": "Tarama tamamlandı!",
|
| 1625 |
"finished_at": datetime.now().isoformat(),
|
| 1626 |
"universe": universe,
|
| 1627 |
-
"
|
| 1628 |
}
|
| 1629 |
-
# Telegram: tarama bitti
|
| 1630 |
-
try:
|
| 1631 |
-
import telegram_bot as _tg_bot
|
| 1632 |
-
scan_file = get_scan_results_path(market_id, completed=True)
|
| 1633 |
-
if scan_file.exists():
|
| 1634 |
-
_tg_bot.notify_scan_complete(json.loads(scan_file.read_text()))
|
| 1635 |
-
except Exception:
|
| 1636 |
-
pass
|
| 1637 |
-
|
| 1638 |
except Exception as e:
|
| 1639 |
_scan_status = {
|
| 1640 |
"running": False,
|
| 1641 |
-
"progress": f"Tarama hatası: {e}",
|
| 1642 |
"error": str(e),
|
| 1643 |
"universe": universe,
|
| 1644 |
-
"
|
| 1645 |
}
|
| 1646 |
-
try:
|
| 1647 |
-
import telegram_bot as _tg_bot
|
| 1648 |
-
_tg_bot.notify_error("Tarama Hatası", str(e))
|
| 1649 |
-
except Exception:
|
| 1650 |
-
pass
|
| 1651 |
|
| 1652 |
-
_scan_thread = threading.Thread(target=_run_scan, daemon=True, name="
|
| 1653 |
_scan_thread.start()
|
| 1654 |
return {
|
| 1655 |
"success": True,
|
| 1656 |
-
"message": f"{
|
| 1657 |
"scanStatus": _scan_status,
|
| 1658 |
}
|
| 1659 |
|
|
@@ -1666,13 +1411,13 @@ def trading_action(req: TradingActionRequest):
|
|
| 1666 |
|
| 1667 |
class TradingSyncPayload(BaseModel):
    """Request body for POST /api/trading/sync.

    Uploaded by a worker to mirror its local trading files into the API
    process.  All data sections are optional; only the sections present
    in the payload are persisted by the sync handler.
    """
    sync_key: str  # shared secret; handler rejects the request with 403 if it does not match
    # Target market identifier (e.g. "bist", "us"). Presumably falls back to
    # the default market when omitted — TODO confirm against the sync handler.
    market_id: Optional[str] = None
    state: Optional[dict] = None         # worker state snapshot (auto_trader/state.json)
    status: Optional[dict] = None        # worker status snapshot (auto_trader/status.json)
    trades: Optional[list] = None        # journal trade records
    equity_curve: Optional[list] = None  # daily PnL / equity points
    signals: Optional[list] = None       # signal log entries; handler writes one JSON line each
    scan_results: Optional[dict] = None  # market scan results; stored per-market to avoid cross-market overwrite
|
|
|
|
| 1676 |
|
| 1677 |
|
| 1678 |
@app.post("/api/trading/sync")
|
|
@@ -1683,14 +1428,9 @@ def trading_sync(payload: TradingSyncPayload):
|
|
| 1683 |
raise HTTPException(status_code=403, detail="Invalid sync key")
|
| 1684 |
|
| 1685 |
paper_dir = Path("paper_trading")
|
| 1686 |
-
|
| 1687 |
-
|
| 1688 |
-
auto_dir = market_dir / "auto_trader"
|
| 1689 |
-
journal_dir = paper_dir / "journal" if market_id == DEFAULT_MARKET_ID else market_dir / "journal"
|
| 1690 |
-
scan_file = get_scan_results_path(market_id, completed=True)
|
| 1691 |
-
|
| 1692 |
paper_dir.mkdir(parents=True, exist_ok=True)
|
| 1693 |
-
market_dir.mkdir(parents=True, exist_ok=True)
|
| 1694 |
auto_dir.mkdir(parents=True, exist_ok=True)
|
| 1695 |
journal_dir.mkdir(parents=True, exist_ok=True)
|
| 1696 |
|
|
@@ -1724,29 +1464,29 @@ def trading_sync(payload: TradingSyncPayload):
|
|
| 1724 |
_atomic_write(journal_dir / "signals_log.jsonl", "\n".join(lines) + "\n" if lines else "")
|
| 1725 |
synced.append(f"signals({len(payload.signals)})")
|
| 1726 |
|
| 1727 |
-
# 6. scan_results
|
|
|
|
| 1728 |
if payload.scan_results is not None:
|
| 1729 |
-
|
| 1730 |
-
|
|
|
|
|
|
|
|
|
|
| 1731 |
|
| 1732 |
# 7. Update in-memory cache (survives ephemeral filesystem resets)
|
| 1733 |
global _remote_cache
|
| 1734 |
-
|
| 1735 |
-
|
| 1736 |
-
|
| 1737 |
-
|
| 1738 |
-
"trades"
|
| 1739 |
-
|
| 1740 |
-
"
|
| 1741 |
-
|
| 1742 |
-
"
|
| 1743 |
-
|
| 1744 |
-
|
| 1745 |
-
|
| 1746 |
-
_remote_cache = {
|
| 1747 |
-
"markets": markets_cache,
|
| 1748 |
-
"updated_at": datetime.now().isoformat(),
|
| 1749 |
-
}
|
| 1750 |
|
| 1751 |
return {
|
| 1752 |
"success": True,
|
|
@@ -1763,30 +1503,31 @@ def _atomic_write(path: Path, content: str) -> None:
|
|
| 1763 |
|
| 1764 |
|
| 1765 |
@app.get("/api/eligible")
|
| 1766 |
-
def get_eligible_stocks(market: str = Query(
|
| 1767 |
"""
|
| 1768 |
-
GET /api/eligible
|
| 1769 |
-
Returns list of eligible stocks from
|
| 1770 |
"""
|
| 1771 |
-
|
| 1772 |
-
if market_id not in _SUPPORTED_MARKETS:
|
| 1773 |
-
market_id = DEFAULT_MARKET_ID
|
| 1774 |
|
| 1775 |
-
|
|
|
|
|
|
|
| 1776 |
|
| 1777 |
-
|
| 1778 |
-
|
| 1779 |
-
|
|
|
|
|
|
|
| 1780 |
|
| 1781 |
-
if not
|
| 1782 |
return {
|
| 1783 |
"ok": False,
|
| 1784 |
-
"error": f"Scan results not found
|
| 1785 |
-
"scanRunning":
|
| 1786 |
-
"scanProgress":
|
| 1787 |
"scanCompleted": False,
|
| 1788 |
-
"universe":
|
| 1789 |
-
"market": market_id,
|
| 1790 |
"totalStocks": None,
|
| 1791 |
"stage1Done": None,
|
| 1792 |
"stage2Done": None,
|
|
@@ -1806,8 +1547,36 @@ def get_eligible_stocks(market: str = Query(DEFAULT_MARKET_ID, description="bist
|
|
| 1806 |
}
|
| 1807 |
|
| 1808 |
try:
|
| 1809 |
-
|
| 1810 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1811 |
|
| 1812 |
completed = bool(data.get("completed", False))
|
| 1813 |
universe = data.get("universe")
|
|
@@ -1852,12 +1621,23 @@ def get_eligible_stocks(market: str = Query(DEFAULT_MARKET_ID, description="bist
|
|
| 1852 |
stage1_total = 0
|
| 1853 |
|
| 1854 |
# Be explicit: stage2 empty = scan running, or stage1 filtered everything.
|
| 1855 |
-
base_error = f"Scan results not ready yet
|
| 1856 |
-
if not
|
| 1857 |
-
base_error = "Stage 2 empty: Stage 1 filtered all stocks (no symbols passed)."
|
| 1858 |
-
|
| 1859 |
-
scan_running = bool(
|
| 1860 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1861 |
|
| 1862 |
return {
|
| 1863 |
"ok": False,
|
|
@@ -1883,7 +1663,6 @@ def get_eligible_stocks(market: str = Query(DEFAULT_MARKET_ID, description="bist
|
|
| 1883 |
"avgHitRate": 0,
|
| 1884 |
},
|
| 1885 |
"timestamp": data.get("scan_finished") or data.get("updated_at") or data.get("scan_started"),
|
| 1886 |
-
"market": market_id,
|
| 1887 |
}
|
| 1888 |
|
| 1889 |
eligible = []
|
|
@@ -1962,11 +1741,17 @@ def get_eligible_stocks(market: str = Query(DEFAULT_MARKET_ID, description="bist
|
|
| 1962 |
avg_hit_rate = sum(e.get("hit_rate", 0) for e in eligible) / len(eligible) if eligible else 0
|
| 1963 |
|
| 1964 |
# Resolve scan progress for ok:true (stage2 has data)
|
| 1965 |
-
scan_running_flag = bool(
|
| 1966 |
if not completed:
|
| 1967 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1968 |
else:
|
| 1969 |
-
scan_progress_str =
|
| 1970 |
|
| 1971 |
return {
|
| 1972 |
"ok": True,
|
|
@@ -1974,7 +1759,6 @@ def get_eligible_stocks(market: str = Query(DEFAULT_MARKET_ID, description="bist
|
|
| 1974 |
"scanProgress": scan_progress_str,
|
| 1975 |
"scanCompleted": completed,
|
| 1976 |
"universe": universe,
|
| 1977 |
-
"market": market_id,
|
| 1978 |
"totalStocks": total_stocks,
|
| 1979 |
"stage1Done": stage1_passed_count + len(stage1_failures),
|
| 1980 |
"stage2Done": stage2_done,
|
|
@@ -1998,130 +1782,11 @@ def get_eligible_stocks(market: str = Query(DEFAULT_MARKET_ID, description="bist
|
|
| 1998 |
return {
|
| 1999 |
"ok": False,
|
| 2000 |
"error": str(e),
|
| 2001 |
-
"scanRunning":
|
| 2002 |
-
"scanProgress":
|
| 2003 |
-
"market": market_id,
|
| 2004 |
}
|
| 2005 |
|
| 2006 |
|
| 2007 |
-
@app.get("/api/diagnose")
|
| 2008 |
-
def diagnose(market: str = Query("all", description="bist, us, or all")):
|
| 2009 |
-
"""Internal diagnostics — process list, telegram proxy, scan age, worker health."""
|
| 2010 |
-
import subprocess, sys
|
| 2011 |
-
from datetime import timedelta
|
| 2012 |
-
|
| 2013 |
-
selected = [market] if market in _SUPPORTED_MARKETS else list(_SUPPORTED_MARKETS)
|
| 2014 |
-
diag: Dict[str, Any] = {"markets": {}}
|
| 2015 |
-
|
| 2016 |
-
def market_paths(market_id: str) -> Dict[str, Path]:
|
| 2017 |
-
paper_dir = Path("paper_trading")
|
| 2018 |
-
market_dir = paper_dir if market_id == DEFAULT_MARKET_ID else paper_dir / "markets" / market_id
|
| 2019 |
-
auto_dir = market_dir / "auto_trader"
|
| 2020 |
-
return {
|
| 2021 |
-
"scan": get_scan_results_path(market_id, completed=True),
|
| 2022 |
-
"pid": auto_dir / "worker.pid",
|
| 2023 |
-
"state": auto_dir / "state.json",
|
| 2024 |
-
"db": get_trading_db_path(market_id),
|
| 2025 |
-
}
|
| 2026 |
-
|
| 2027 |
-
for market_id in selected:
|
| 2028 |
-
market_diag: Dict[str, Any] = {}
|
| 2029 |
-
paths = market_paths(market_id)
|
| 2030 |
-
|
| 2031 |
-
if paths["scan"].exists():
|
| 2032 |
-
try:
|
| 2033 |
-
sdata = json.loads(paths["scan"].read_text())
|
| 2034 |
-
finished = sdata.get("scan_finished") or sdata.get("updated_at")
|
| 2035 |
-
if finished:
|
| 2036 |
-
scan_dt = datetime.fromisoformat(str(finished).replace("Z", "+00:00"))
|
| 2037 |
-
age_h = (datetime.now(timezone.utc) - scan_dt).total_seconds() / 3600
|
| 2038 |
-
market_diag["scan_age_hours"] = round(age_h, 1)
|
| 2039 |
-
market_diag["scan_stale"] = age_h > 72
|
| 2040 |
-
market_diag["scan_completed"] = sdata.get("completed", False)
|
| 2041 |
-
market_diag["scan_date"] = sdata.get("scan_date", "?")[:10]
|
| 2042 |
-
stage2 = sdata.get("stage2", {})
|
| 2043 |
-
market_diag["eligible_count"] = sum(1 for value in stage2.values() if value.get("eligible"))
|
| 2044 |
-
except Exception as exc:
|
| 2045 |
-
market_diag["scan_error"] = str(exc)
|
| 2046 |
-
else:
|
| 2047 |
-
market_diag["scan_file"] = "missing"
|
| 2048 |
-
|
| 2049 |
-
if paths["pid"].exists():
|
| 2050 |
-
try:
|
| 2051 |
-
pid = int(paths["pid"].read_text().strip())
|
| 2052 |
-
os.kill(pid, 0)
|
| 2053 |
-
market_diag["worker_pid"] = pid
|
| 2054 |
-
market_diag["worker_alive"] = True
|
| 2055 |
-
except Exception:
|
| 2056 |
-
market_diag["worker_alive"] = False
|
| 2057 |
-
market_diag["worker_pid"] = None
|
| 2058 |
-
else:
|
| 2059 |
-
market_diag["worker_alive"] = False
|
| 2060 |
-
market_diag["worker_pid"] = None
|
| 2061 |
-
|
| 2062 |
-
market_diag["files"] = {
|
| 2063 |
-
"worker_pid": paths["pid"].exists(),
|
| 2064 |
-
"scan_results": paths["scan"].exists(),
|
| 2065 |
-
"state_json": paths["state"].exists(),
|
| 2066 |
-
"trading_db": paths["db"].exists(),
|
| 2067 |
-
}
|
| 2068 |
-
diag["markets"][market_id] = market_diag
|
| 2069 |
-
|
| 2070 |
-
if market in _SUPPORTED_MARKETS:
|
| 2071 |
-
diag.update(diag["markets"][market])
|
| 2072 |
-
diag["market"] = market
|
| 2073 |
-
|
| 2074 |
-
# 3. Scan scheduler thread
|
| 2075 |
-
diag["scheduler_running"] = _scheduler_thread.is_alive() if _scheduler_thread else False
|
| 2076 |
-
diag["watchdog_running"] = _watchdog_thread.is_alive() if _watchdog_thread else False
|
| 2077 |
-
diag["scan_status"] = _scan_status
|
| 2078 |
-
|
| 2079 |
-
# 4. Telegram proxy test
|
| 2080 |
-
proxy_url = os.environ.get("TELEGRAM_PROXY_URL", "https://telegram-proxy-vercel-neon.vercel.app/tgproxy")
|
| 2081 |
-
try:
|
| 2082 |
-
import urllib.request as _ureq
|
| 2083 |
-
_ureq.urlopen(proxy_url, timeout=5)
|
| 2084 |
-
diag["telegram_proxy"] = "ok"
|
| 2085 |
-
except Exception as e:
|
| 2086 |
-
diag["telegram_proxy"] = f"error: {e}"
|
| 2087 |
-
|
| 2088 |
-
# 5. Direct Telegram API reachability (actual bot API call)
|
| 2089 |
-
try:
|
| 2090 |
-
import http.client as _hc2, ssl as _ssl2
|
| 2091 |
-
_ctx = _ssl2.create_default_context()
|
| 2092 |
-
_conn = _hc2.HTTPSConnection("api.telegram.org", context=_ctx, timeout=5)
|
| 2093 |
-
_bot_token = os.environ.get("TELEGRAM_BOT_TOKEN", "")
|
| 2094 |
-
_conn.request("GET", f"/bot{_bot_token}/getMe")
|
| 2095 |
-
_resp = _conn.getresponse()
|
| 2096 |
-
_body = json.loads(_resp.read().decode())
|
| 2097 |
-
diag["telegram_direct"] = f"ok (getMe: {_body.get('ok')})"
|
| 2098 |
-
_conn.close()
|
| 2099 |
-
except Exception as e:
|
| 2100 |
-
diag["telegram_direct"] = f"error: {e}"
|
| 2101 |
-
|
| 2102 |
-
# 5b. DNS debug — essential checks only
|
| 2103 |
-
dns_debug = {}
|
| 2104 |
-
import socket as _s
|
| 2105 |
-
dns_debug["getaddrinfo_patched"] = getattr(_s.getaddrinfo, "__name__", "?") == "_tg_dns_fix"
|
| 2106 |
-
try:
|
| 2107 |
-
sock = _s.create_connection(("api.telegram.org", 443), timeout=5)
|
| 2108 |
-
sock.close()
|
| 2109 |
-
dns_debug["tcp_telegram"] = "ok"
|
| 2110 |
-
except Exception as e:
|
| 2111 |
-
dns_debug["tcp_telegram"] = f"error: {e}"
|
| 2112 |
-
diag["dns_debug"] = dns_debug
|
| 2113 |
-
|
| 2114 |
-
# 6. Env vars check (non-secret)
|
| 2115 |
-
diag["env"] = {
|
| 2116 |
-
"TELEGRAM_BOT_TOKEN": "set" if os.environ.get("TELEGRAM_BOT_TOKEN") else "MISSING",
|
| 2117 |
-
"TELEGRAM_CHAT_ID": "set" if os.environ.get("TELEGRAM_CHAT_ID") else "MISSING",
|
| 2118 |
-
"TELEGRAM_PROXY_URL": os.environ.get("TELEGRAM_PROXY_URL", "(default)"),
|
| 2119 |
-
}
|
| 2120 |
-
|
| 2121 |
-
diag["timestamp"] = datetime.now(timezone.utc).isoformat()
|
| 2122 |
-
return diag
|
| 2123 |
-
|
| 2124 |
-
|
| 2125 |
# For Hugging Face Spaces
|
| 2126 |
if __name__ == "__main__":
|
| 2127 |
import uvicorn
|
|
|
|
| 2 |
Hugging Face Space: Borsa ML API
|
| 3 |
FastAPI backend for stock analysis and ML predictions
|
| 4 |
"""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 5 |
from fastapi import FastAPI, HTTPException, Query
|
| 6 |
from fastapi.middleware.cors import CORSMiddleware
|
| 7 |
from pydantic import BaseModel
|
| 8 |
+
from typing import List, Optional
|
| 9 |
import sys
|
| 10 |
import os
|
| 11 |
import json
|
| 12 |
+
from datetime import datetime
|
| 13 |
from pathlib import Path
|
| 14 |
|
| 15 |
import math
|
| 16 |
|
|
|
|
|
|
|
| 17 |
# Add parent directory to path for imports
|
| 18 |
sys.path.insert(0, os.path.dirname(__file__))
|
| 19 |
|
|
|
|
| 54 |
|
| 55 |
# ─── Remote sync cache (survives ephemeral filesystem resets) ─────────
|
| 56 |
_remote_cache: dict = {}
|
|
|
|
| 57 |
|
| 58 |
# ─── Daily Auto-Scan Scheduler ────────────────────────
|
| 59 |
# Runs a full-market scan every day at 19:00 Turkey time (16:00 UTC)
|
|
|
|
| 62 |
import threading
|
| 63 |
import time as _time
|
| 64 |
|
| 65 |
+
def _make_scan_progress_cb(label: str):
|
| 66 |
+
"""Return a progress callback that updates the global _scan_status."""
|
| 67 |
+
def _cb(stage: str, info: dict) -> None:
|
| 68 |
+
global _scan_status
|
| 69 |
+
idx = info.get("index", "")
|
| 70 |
+
total = info.get("total", "")
|
| 71 |
+
sym = info.get("symbol", "")
|
| 72 |
+
s1p = info.get("stage1_passed", 0)
|
| 73 |
+
s1f = info.get("stage1_failed", 0)
|
| 74 |
+
elig = info.get("eligible_count", 0)
|
| 75 |
+
excl = info.get("excluded_count", 0)
|
| 76 |
+
if stage in ("stage1_start", "stage1"):
|
| 77 |
+
_scan_status["progress"] = (
|
| 78 |
+
f"[{label}] Stage 1: {idx}/{total} ({sym}). "
|
| 79 |
+
f"Geçti: {s1p}, Elendi: {s1f}"
|
| 80 |
+
)
|
| 81 |
+
elif stage in ("stage2_start", "stage2"):
|
| 82 |
+
_scan_status["progress"] = (
|
| 83 |
+
f"[{label}] Stage 2: {idx}/{total} ({sym}). "
|
| 84 |
+
f"Eligible: {elig}, Elenen: {excl}"
|
| 85 |
+
)
|
| 86 |
+
return _cb
|
| 87 |
|
| 88 |
+
|
| 89 |
+
def _run_scan_in_thread(universe: str, label: str, stage1_only: bool = False, market_id: str = "bist"):
|
| 90 |
+
"""Run a scan in a background thread. Non-blocking."""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 91 |
global _scan_thread, _scan_status
|
|
|
|
|
|
|
| 92 |
|
| 93 |
+
if _scan_status.get("running"):
|
| 94 |
+
print(f"[scheduler] Scan already running, skipping {label}")
|
| 95 |
+
return
|
| 96 |
+
|
| 97 |
+
title = "BIST" if market_id == "bist" else market_id.upper()
|
| 98 |
+
_scan_status = {
|
| 99 |
+
"running": True,
|
| 100 |
+
"progress": f"{label} taraması başlatılıyor ({universe}, {title})...",
|
| 101 |
+
"started_at": datetime.now().isoformat(),
|
| 102 |
+
"universe": universe,
|
| 103 |
+
"market_id": market_id,
|
| 104 |
+
"scheduled": True,
|
| 105 |
+
}
|
| 106 |
|
| 107 |
+
def _do_scan():
|
| 108 |
+
global _scan_status
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 109 |
try:
|
| 110 |
+
from trading.scanner_engine import ScanConfig, run_scan as _engine_scan
|
| 111 |
+
import logging as _logging
|
| 112 |
+
_scan_logger = _logging.getLogger(f"scan-{label.lower()}-{market_id}")
|
| 113 |
+
config = ScanConfig(market_id=market_id, universe_name=universe, title=title)
|
| 114 |
+
_engine_scan(config, _scan_logger, force=False, stage1_only=stage1_only)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 115 |
_scan_status = {
|
| 116 |
"running": False,
|
| 117 |
+
"progress": f"{label} taraması tamamlandı ({universe}, {title})!",
|
| 118 |
"finished_at": datetime.now().isoformat(),
|
| 119 |
+
"universe": universe,
|
| 120 |
+
"market_id": market_id,
|
| 121 |
"scheduled": True,
|
| 122 |
}
|
| 123 |
+
print(f"[scheduler] {label} scan ({market_id}/{universe}) completed at {datetime.now().isoformat()}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 124 |
except Exception as e:
|
| 125 |
_scan_status = {
|
| 126 |
"running": False,
|
| 127 |
+
"progress": f"{label} tarama hatası ({title}): {e}",
|
| 128 |
"error": str(e),
|
| 129 |
+
"universe": universe,
|
| 130 |
+
"market_id": market_id,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 131 |
"scheduled": True,
|
| 132 |
}
|
| 133 |
+
print(f"[scheduler] {label} scan error ({market_id}): {e}")
|
| 134 |
|
| 135 |
+
_scan_thread = threading.Thread(target=_do_scan, daemon=True, name=f"scan-{label.lower()}-{market_id}")
|
| 136 |
+
_scan_thread.start()
|
| 137 |
+
print(f"[scheduler] {label} scan thread started ({market_id}/{universe}, stage1_only={stage1_only})")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 138 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 139 |
|
| 140 |
+
def _daily_scan_scheduler():
|
| 141 |
+
"""Background thread: triggers scans for BIST and US markets.
|
| 142 |
|
| 143 |
+
Boot behaviour (non-blocking):
|
| 144 |
+
- If scan results missing/stale → start a stage1-only boot scan in a thread.
|
| 145 |
+
Stage1 takes ~5 min (vs hours for full scan) and gives the worker
|
| 146 |
+
enough data to trade while keeping memory low on HF free tier.
|
| 147 |
+
|
| 148 |
+
Nightly scans:
|
| 149 |
+
- BIST: 19:00 TR (1hr after BIST close)
|
| 150 |
+
- US: 00:30 TR (≈17:30 ET, 1.5hr after NYSE close)
|
| 151 |
+
"""
|
| 152 |
+
from datetime import timezone, timedelta
|
| 153 |
+
from trading.market_registry import get_scan_results_path
|
| 154 |
|
| 155 |
+
TR_TZ = timezone(timedelta(hours=3))
|
| 156 |
+
BIST_SCAN_HOUR_TR = 19
|
| 157 |
+
US_SCAN_HOUR_TR = 0 # midnight TR ≈ 17:00 ET (after NYSE 16:00 close)
|
| 158 |
+
US_SCAN_MIN_TR = 30
|
| 159 |
+
|
| 160 |
+
# ── 10s grace for uvicorn to bind port first ──────────────────────
|
| 161 |
+
_time.sleep(10)
|
| 162 |
+
|
| 163 |
+
# ── Helper: check if a market needs boot scan ─────────────────────
|
| 164 |
+
def _needs_boot_scan(market_id: str) -> bool:
|
| 165 |
+
scan_file = get_scan_results_path(market_id, completed=True)
|
| 166 |
+
work_file = get_scan_results_path(market_id, completed=False)
|
| 167 |
+
if not scan_file.exists() and not work_file.exists():
|
| 168 |
+
print(f"[scheduler] No {market_id} scan results — will run boot scan")
|
| 169 |
+
return True
|
| 170 |
+
if scan_file.exists():
|
| 171 |
+
try:
|
| 172 |
+
scan_data = json.loads(scan_file.read_text())
|
| 173 |
+
finished = scan_data.get("scan_finished") or scan_data.get("updated_at")
|
| 174 |
+
if finished:
|
| 175 |
+
scan_dt = datetime.fromisoformat(str(finished).replace("Z", "+00:00"))
|
| 176 |
+
age_hours = (datetime.now(timezone(timedelta(hours=0))) - scan_dt).total_seconds() / 3600
|
| 177 |
+
if age_hours > 72:
|
| 178 |
+
print(f"[scheduler] {market_id} scan results stale ({age_hours:.0f}h old)")
|
| 179 |
+
return True
|
| 180 |
+
else:
|
| 181 |
+
print(f"[scheduler] {market_id} scan results fresh ({age_hours:.0f}h old)")
|
| 182 |
+
return False
|
| 183 |
+
else:
|
| 184 |
+
return True
|
| 185 |
+
except Exception as e:
|
| 186 |
+
print(f"[scheduler] Error reading {market_id} scan file: {e}")
|
| 187 |
+
return True
|
| 188 |
+
return False
|
| 189 |
|
| 190 |
+
# ── Boot scans (non-blocking, stage1 only) ────────────────��───────
|
| 191 |
+
if _needs_boot_scan("bist"):
|
| 192 |
+
_run_scan_in_thread("bist100", "Boot-BIST", stage1_only=True, market_id="bist")
|
| 193 |
|
| 194 |
+
# Wait for BIST boot scan to finish before starting US (memory-safe)
|
| 195 |
+
_time.sleep(30)
|
| 196 |
+
while _scan_status.get("running"):
|
| 197 |
+
_time.sleep(15)
|
|
|
|
| 198 |
|
| 199 |
+
if _needs_boot_scan("us"):
|
| 200 |
+
_run_scan_in_thread("sp100", "Boot-US", stage1_only=True, market_id="us")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 201 |
|
| 202 |
+
# ── Nightly recurring scan loop ───────────────────────────────────
|
| 203 |
while True:
|
| 204 |
try:
|
| 205 |
+
now_tr = datetime.now(TR_TZ)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 206 |
|
| 207 |
+
# Next BIST scan: 19:00 TR
|
| 208 |
+
bist_target = now_tr.replace(hour=BIST_SCAN_HOUR_TR, minute=0, second=0, microsecond=0)
|
| 209 |
+
if now_tr >= bist_target:
|
| 210 |
+
bist_target = bist_target + timedelta(days=1)
|
| 211 |
+
|
| 212 |
+
# Next US scan: 00:30 TR
|
| 213 |
+
us_target = now_tr.replace(hour=US_SCAN_HOUR_TR, minute=US_SCAN_MIN_TR, second=0, microsecond=0)
|
| 214 |
+
if now_tr >= us_target:
|
| 215 |
+
us_target = us_target + timedelta(days=1)
|
| 216 |
+
|
| 217 |
+
# Pick whichever is sooner
|
| 218 |
+
if bist_target <= us_target:
|
| 219 |
+
next_target = bist_target
|
| 220 |
+
next_market = "bist"
|
| 221 |
+
next_universe = "bist100"
|
| 222 |
+
else:
|
| 223 |
+
next_target = us_target
|
| 224 |
+
next_market = "us"
|
| 225 |
+
next_universe = "sp100"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 226 |
|
| 227 |
+
wait_seconds = (next_target - now_tr).total_seconds()
|
| 228 |
+
print(f"[scheduler] Next auto-scan: {next_market}/{next_universe} at {next_target.isoformat()} (in {wait_seconds/3600:.1f}h)")
|
| 229 |
+
_time.sleep(wait_seconds)
|
| 230 |
|
| 231 |
+
_run_scan_in_thread(next_universe, f"Nightly-{next_market.upper()}", stage1_only=False, market_id=next_market)
|
| 232 |
except Exception as e:
|
| 233 |
+
print(f"[scheduler] Scheduler error: {e}")
|
| 234 |
_time.sleep(3600)
|
| 235 |
|
| 236 |
+
# Start scheduler on import
|
| 237 |
+
_scheduler_thread = threading.Thread(target=_daily_scan_scheduler, daemon=True, name="scan-scheduler")
|
| 238 |
+
_scheduler_thread.start()
|
| 239 |
+
print("[scheduler] Daily auto-scan scheduler started (19:00 TR / 16:00 UTC — BIST100)")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 240 |
|
| 241 |
|
| 242 |
def _get_trading_status():
|
| 243 |
"""Read trading status from file-based state."""
|
| 244 |
paper_dir = Path("paper_trading")
|
| 245 |
+
state_file = paper_dir / "auto_trader" / "state.json"
|
| 246 |
+
status_file = paper_dir / "auto_trader" / "status.json"
|
| 247 |
+
trades_file = paper_dir / "journal" / "trades.jsonl"
|
| 248 |
+
equity_file = paper_dir / "journal" / "daily_pnl.jsonl"
|
| 249 |
+
signals_file = paper_dir / "journal" / "signals_log.jsonl"
|
| 250 |
+
kill_file = paper_dir / "kill_switch.json"
|
| 251 |
+
scan_file = paper_dir / "bist100_scan_results.json"
|
| 252 |
+
pid_file = paper_dir / "auto_trader" / "worker.pid"
|
| 253 |
+
db_file = paper_dir / "trading.db"
|
| 254 |
+
|
| 255 |
+
def read_json(p):
|
| 256 |
try:
|
| 257 |
+
return json.loads(p.read_text()) if p.exists() else None
|
| 258 |
except Exception:
|
| 259 |
return None
|
| 260 |
|
| 261 |
+
def read_jsonl(p):
|
| 262 |
try:
|
| 263 |
+
if not p.exists():
|
| 264 |
return []
|
| 265 |
+
return [json.loads(line) for line in p.read_text().strip().split("\n") if line.strip()]
|
| 266 |
except Exception:
|
| 267 |
return []
|
| 268 |
|
| 269 |
+
def check_pid():
|
| 270 |
try:
|
| 271 |
if not pid_file.exists():
|
| 272 |
return False, None
|
|
|
|
| 276 |
except Exception:
|
| 277 |
return False, None
|
| 278 |
|
| 279 |
+
# Remote cache takes priority (survives ephemeral FS resets).
|
| 280 |
+
# Files are only used as fallback when cache is empty (fresh boot before first sync).
|
| 281 |
+
state = _remote_cache.get("state") or read_json(state_file)
|
| 282 |
+
status = _remote_cache.get("status") or read_json(status_file)
|
| 283 |
+
trades = _remote_cache.get("trades", []) or read_jsonl(trades_file)
|
| 284 |
+
equity = _remote_cache.get("equity_curve", []) or read_jsonl(equity_file)
|
| 285 |
+
signals = _remote_cache.get("signals", []) or read_jsonl(signals_file)
|
| 286 |
+
kill_switch = read_json(kill_file)
|
| 287 |
+
scan_data = _remote_cache.get("scan_results:bist") or read_json(scan_file)
|
| 288 |
+
worker_running, worker_pid = check_pid()
|
| 289 |
+
|
| 290 |
+
# SQLite fallback: worker now persists journal to DB, JSONL may be empty.
|
| 291 |
+
if db_file.exists() and (
|
| 292 |
+
(not trades) or (not equity) or (not signals)
|
| 293 |
+
):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 294 |
try:
|
| 295 |
+
from trading.db_store import TradingStore
|
| 296 |
+
|
| 297 |
+
store = TradingStore(db_path=str(db_file))
|
| 298 |
+
if not trades:
|
| 299 |
+
trades = store.get_all_trades()
|
| 300 |
+
if not equity:
|
| 301 |
+
equity = store.get_equity_curve()
|
| 302 |
+
if not signals:
|
| 303 |
+
signals = store.get_signals(limit=500)
|
| 304 |
+
except Exception as e:
|
| 305 |
+
# Keep file-based fallback; surface the error for diagnostics.
|
| 306 |
+
status = status or {}
|
| 307 |
+
status["db_fallback_error"] = str(e)
|
| 308 |
|
| 309 |
+
# Eligible stocks
|
| 310 |
+
eligible = []
|
| 311 |
+
try:
|
| 312 |
+
s2 = (scan_data or {}).get("stage2", {})
|
| 313 |
+
eligible = sorted([sym for sym, v in s2.items() if v.get("eligible")])
|
| 314 |
+
except Exception:
|
| 315 |
+
pass
|
| 316 |
+
|
| 317 |
+
open_trades = [t for t in trades if not t.get("is_closed")]
|
| 318 |
+
closed_trades = [t for t in trades if t.get("is_closed")]
|
| 319 |
+
|
| 320 |
+
total_pnl = sum(t.get("net_pnl", 0) for t in closed_trades)
|
| 321 |
+
winners = [t for t in closed_trades if (t.get("net_pnl", 0)) > 0]
|
| 322 |
+
losers = [t for t in closed_trades if (t.get("net_pnl", 0)) < 0]
|
| 323 |
+
win_rate = (len(winners) / len(closed_trades) * 100) if closed_trades else 0
|
| 324 |
+
avg_win = (sum(t.get("net_pnl", 0) for t in winners) / len(winners)) if winners else 0
|
| 325 |
+
avg_loss = (abs(sum(t.get("net_pnl", 0) for t in losers)) / len(losers)) if losers else 0
|
| 326 |
+
profit_factor = (avg_win / avg_loss) if avg_loss > 0 else (float("inf") if avg_win > 0 else 0)
|
| 327 |
+
best_trade = max((t.get("return_pct", 0) for t in closed_trades), default=0)
|
| 328 |
+
worst_trade = min((t.get("return_pct", 0) for t in closed_trades), default=0)
|
| 329 |
+
|
| 330 |
+
# Per-symbol breakdown
|
| 331 |
+
symbol_stats = {}
|
| 332 |
+
for t in closed_trades:
|
| 333 |
+
sym = t.get("symbol", "?")
|
| 334 |
+
if sym not in symbol_stats:
|
| 335 |
+
symbol_stats[sym] = {"trades": 0, "pnl": 0, "wins": 0}
|
| 336 |
+
symbol_stats[sym]["trades"] += 1
|
| 337 |
+
symbol_stats[sym]["pnl"] += t.get("net_pnl", 0)
|
| 338 |
+
if t.get("net_pnl", 0) > 0:
|
| 339 |
+
symbol_stats[sym]["wins"] += 1
|
| 340 |
+
|
| 341 |
+
cash = float((state or {}).get("broker_cash", 100000))
|
| 342 |
+
positions = (state or {}).get("broker_positions", {})
|
| 343 |
+
latest_eq = equity[-1] if equity else None
|
| 344 |
+
|
| 345 |
+
last_worker_status = None
|
| 346 |
+
last_worker_reason = None
|
| 347 |
+
last_result = None
|
| 348 |
+
if isinstance(status, dict):
|
| 349 |
+
last_worker_status = status.get("status")
|
| 350 |
+
last_worker_reason = status.get("reason")
|
| 351 |
+
last_result = status.get("last_result")
|
| 352 |
+
# Back-compat: sometimes only last_result exists
|
| 353 |
if not last_worker_status and isinstance(last_result, dict):
|
| 354 |
last_worker_status = last_result.get("status")
|
| 355 |
if not last_worker_reason and isinstance(last_result, dict):
|
| 356 |
last_worker_reason = last_result.get("reason")
|
| 357 |
|
| 358 |
+
return {
|
| 359 |
+
"status": {
|
| 360 |
+
"isRunning": (status or {}).get("is_running", False),
|
| 361 |
+
"lastRunDate": (state or {}).get("last_run_date"),
|
| 362 |
+
"totalDaysRun": (state or {}).get("total_days_run", 0),
|
| 363 |
+
"totalTrades": (state or {}).get("total_trades", 0),
|
| 364 |
+
"currentPhase": (status or {}).get("current_phase"),
|
| 365 |
+
"killSwitchActive": (kill_switch or {}).get("active", False),
|
| 366 |
+
"killSwitchReason": (kill_switch or {}).get("reason"),
|
| 367 |
+
"workerRunning": worker_running,
|
| 368 |
+
"workerPid": worker_pid,
|
| 369 |
+
"lastWorkerStatus": last_worker_status,
|
| 370 |
+
"lastWorkerReason": last_worker_reason,
|
| 371 |
+
"lastResult": last_result,
|
| 372 |
+
},
|
| 373 |
+
"portfolio": {
|
| 374 |
+
"cash": round(cash, 2),
|
| 375 |
+
"equity": latest_eq.get("equity", cash) if latest_eq else cash,
|
| 376 |
+
"positionCount": len(positions),
|
| 377 |
+
"positions": [
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 378 |
{
|
| 379 |
+
"symbol": sym,
|
| 380 |
+
"quantity": pos.get("qty"),
|
| 381 |
+
"avgCost": pos.get("avg_cost"),
|
| 382 |
+
"entryDate": pos.get("entry_date"),
|
| 383 |
+
"notional": (pos.get("qty", 0)) * (pos.get("avg_cost", 0)),
|
|
|
|
|
|
|
| 384 |
}
|
| 385 |
+
for sym, pos in positions.items()
|
| 386 |
],
|
| 387 |
+
"pnlPct": round(((latest_eq.get("equity", cash) / 100000 - 1) * 100), 2) if latest_eq else 0,
|
| 388 |
+
"unrealizedPnl": latest_eq.get("unrealized_pnl", 0) if latest_eq else 0,
|
| 389 |
+
},
|
| 390 |
+
"equityCurve": [
|
| 391 |
+
{
|
| 392 |
+
"date": e.get("date"),
|
| 393 |
+
"equity": e.get("equity"),
|
| 394 |
+
"cash": e.get("cash"),
|
| 395 |
+
"positions": e.get("positions_count"),
|
| 396 |
+
"realizedPnl": e.get("realized_pnl_today", 0),
|
| 397 |
+
}
|
| 398 |
+
for e in equity
|
| 399 |
+
],
|
| 400 |
+
"openTrades": [
|
| 401 |
+
{
|
| 402 |
+
"symbol": t["symbol"],
|
| 403 |
+
"quantity": t.get("quantity"),
|
| 404 |
+
"entryPrice": t.get("entry_price"),
|
| 405 |
+
"entryDate": t.get("entry_date"),
|
| 406 |
+
"confidence": t.get("signal_confidence"),
|
| 407 |
+
"predictedReturn": t.get("predicted_return"),
|
| 408 |
+
"commission": t.get("entry_commission"),
|
| 409 |
+
}
|
| 410 |
+
for t in open_trades
|
| 411 |
+
],
|
| 412 |
+
"closedTrades": [
|
| 413 |
+
{
|
| 414 |
+
"symbol": t["symbol"],
|
| 415 |
+
"quantity": t.get("quantity"),
|
| 416 |
+
"entryPrice": t.get("entry_price"),
|
| 417 |
+
"exitPrice": t.get("exit_price"),
|
| 418 |
+
"entryDate": t.get("entry_date"),
|
| 419 |
+
"exitDate": t.get("exit_date"),
|
| 420 |
+
"netPnl": t.get("net_pnl"),
|
| 421 |
+
"returnPct": t.get("return_pct"),
|
| 422 |
+
"holdingDays": t.get("holding_days"),
|
| 423 |
+
"exitReason": t.get("exit_reason"),
|
| 424 |
+
}
|
| 425 |
+
for t in closed_trades
|
| 426 |
+
],
|
| 427 |
+
"performance": {
|
| 428 |
+
"totalPnl": round(total_pnl, 2),
|
| 429 |
+
"closedTradesCount": len(closed_trades),
|
| 430 |
+
"openTradesCount": len(open_trades),
|
| 431 |
+
"winRate": round(win_rate, 1),
|
| 432 |
+
"profitFactor": "Inf" if profit_factor == float("inf") else round(profit_factor, 2),
|
| 433 |
+
"avgWin": round(avg_win, 2),
|
| 434 |
+
"avgLoss": round(avg_loss, 2),
|
| 435 |
+
"bestTrade": round(best_trade, 2),
|
| 436 |
+
"worstTrade": round(worst_trade, 2),
|
| 437 |
+
"symbolBreakdown": [
|
| 438 |
{
|
| 439 |
+
"symbol": sym,
|
| 440 |
+
"trades": s["trades"],
|
| 441 |
+
"pnl": round(s["pnl"], 2),
|
| 442 |
+
"winRate": round((s["wins"] / s["trades"]) * 100) if s["trades"] > 0 else 0,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 443 |
}
|
| 444 |
+
for sym, s in symbol_stats.items()
|
| 445 |
],
|
| 446 |
+
},
|
| 447 |
+
"signals": [
|
| 448 |
+
{
|
| 449 |
+
"date": s.get("date"),
|
| 450 |
+
"symbol": s.get("symbol"),
|
| 451 |
+
"signal": s.get("signal"),
|
| 452 |
+
"mlSignal": s.get("ml_signal"),
|
| 453 |
+
"techSignal": s.get("tech_signal"),
|
| 454 |
+
"confidence": s.get("confidence"),
|
| 455 |
+
"predictedReturn": s.get("predicted_return"),
|
| 456 |
+
"actionTaken": s.get("action_taken"),
|
| 457 |
+
}
|
| 458 |
+
for s in signals[-50:]
|
| 459 |
+
][::-1],
|
| 460 |
+
"eligibleStocks": eligible,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 461 |
"timestamp": datetime.now().isoformat(),
|
| 462 |
}
|
| 463 |
|
|
|
|
| 479 |
symbols: List[str]
|
| 480 |
days_ahead: int = 7
|
| 481 |
model: str = "ensemble" # ensemble, xgboost, lightgbm, rf
|
|
|
|
| 482 |
|
| 483 |
|
| 484 |
@app.get("/")
|
|
|
|
| 569 |
symbols=request.symbols,
|
| 570 |
days_ahead=request.days_ahead,
|
| 571 |
model_type=request.model,
|
|
|
|
| 572 |
)
|
| 573 |
|
| 574 |
# If data providers rate-limit / return empty, avoid 500 and let UI degrade gracefully.
|
|
|
|
| 583 |
return {
|
| 584 |
"model": request.model,
|
| 585 |
"days_ahead": request.days_ahead,
|
|
|
|
| 586 |
"predictions": results
|
| 587 |
}
|
| 588 |
except HTTPException:
|
|
|
|
| 1173 |
# If dead, restart it automatically. This survives HF Space container recycles.
|
| 1174 |
|
| 1175 |
def _worker_watchdog():
|
| 1176 |
+
"""Background thread: checks worker PID every 5 minutes, restarts if dead."""
|
| 1177 |
import subprocess
|
| 1178 |
+
pid_file = Path("paper_trading/auto_trader/worker.pid")
|
|
|
|
|
|
|
|
|
|
| 1179 |
CHECK_INTERVAL = 300 # 5 minutes
|
| 1180 |
|
| 1181 |
# Give the worker (started by start.sh) time to boot
|
|
|
|
| 1183 |
|
| 1184 |
while True:
|
| 1185 |
try:
|
| 1186 |
+
worker_alive = False
|
| 1187 |
+
if pid_file.exists():
|
| 1188 |
+
try:
|
| 1189 |
+
pid = int(pid_file.read_text().strip())
|
| 1190 |
+
os.kill(pid, 0) # signal 0 = check if alive
|
| 1191 |
+
worker_alive = True
|
| 1192 |
+
except (ProcessLookupError, ValueError, OSError):
|
| 1193 |
+
# PID stale — clean up
|
| 1194 |
try:
|
| 1195 |
+
pid_file.unlink(missing_ok=True)
|
| 1196 |
+
except Exception:
|
| 1197 |
+
pass
|
| 1198 |
+
|
| 1199 |
+
if not worker_alive:
|
| 1200 |
+
print("[watchdog] Worker not running — restarting...")
|
| 1201 |
+
cwd = str(Path(__file__).parent)
|
| 1202 |
+
subprocess.Popen(
|
| 1203 |
+
[sys.executable, "-m", "trading.worker", "--daemon", "--mode", "paper", "--interval", "3600"],
|
| 1204 |
+
cwd=cwd,
|
| 1205 |
+
stdout=subprocess.DEVNULL,
|
| 1206 |
+
stderr=subprocess.DEVNULL,
|
| 1207 |
+
start_new_session=True,
|
| 1208 |
+
)
|
| 1209 |
+
_time.sleep(5) # give it time to start
|
| 1210 |
+
if pid_file.exists():
|
| 1211 |
+
print(f"[watchdog] Worker restarted (PID={pid_file.read_text().strip()})")
|
| 1212 |
+
else:
|
| 1213 |
+
print("[watchdog] Worker restart attempted but no PID file yet")
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1214 |
except Exception as e:
|
| 1215 |
print(f"[watchdog] Error: {e}")
|
| 1216 |
|
| 1217 |
_time.sleep(CHECK_INTERVAL)
|
| 1218 |
|
| 1219 |
|
| 1220 |
+
_watchdog_thread = threading.Thread(target=_worker_watchdog, daemon=True, name="worker-watchdog")
|
| 1221 |
+
_watchdog_thread.start()
|
| 1222 |
+
print("[watchdog] Worker health watchdog started (checks every 5 min)")
|
| 1223 |
+
|
| 1224 |
+
|
| 1225 |
# ─── Trading API Endpoints ──────────────────────────────────────────────────
|
| 1226 |
|
| 1227 |
|
|
|
|
| 1245 |
@app.post("/api/trading")
|
| 1246 |
def trading_action(req: TradingActionRequest):
|
| 1247 |
"""Execute trading actions: run, start_worker, stop_worker, kill, unkill."""
|
|
|
|
| 1248 |
paper_dir = Path("paper_trading")
|
| 1249 |
+
kill_file = paper_dir / "kill_switch.json"
|
| 1250 |
+
pid_file = paper_dir / "auto_trader" / "worker.pid"
|
|
|
|
|
|
|
| 1251 |
|
| 1252 |
if req.action == "run":
|
| 1253 |
try:
|
| 1254 |
from trading.worker import TradingWorker
|
| 1255 |
+
worker = TradingWorker(mode="paper")
|
| 1256 |
result = worker.run_cycle(force=False)
|
| 1257 |
return {"success": True, "output": str(result), "result": result}
|
| 1258 |
except Exception as e:
|
|
|
|
| 1261 |
if req.action == "run_force":
|
| 1262 |
try:
|
| 1263 |
from trading.worker import TradingWorker
|
| 1264 |
+
worker = TradingWorker(mode="paper")
|
| 1265 |
result = worker.run_cycle(force=True)
|
| 1266 |
return {"success": True, "output": str(result), "result": result}
|
| 1267 |
except Exception as e:
|
|
|
|
| 1287 |
|
| 1288 |
cwd = str(Path(__file__).parent)
|
| 1289 |
proc = subprocess.Popen(
|
| 1290 |
+
[sys.executable, "-m", "trading.worker", "--daemon", "--mode", "paper", "--interval", "3600"],
|
| 1291 |
cwd=cwd,
|
| 1292 |
stdout=subprocess.DEVNULL,
|
| 1293 |
stderr=subprocess.DEVNULL,
|
|
|
|
| 1329 |
return {"error": f"Worker durdurma hatası: {str(e)}"}
|
| 1330 |
|
| 1331 |
elif req.action == "kill":
|
| 1332 |
+
paper_dir.mkdir(parents=True, exist_ok=True)
|
| 1333 |
+
(paper_dir / "auto_trader").mkdir(parents=True, exist_ok=True)
|
| 1334 |
kill_file.write_text(json.dumps({
|
| 1335 |
"active": True,
|
| 1336 |
"reason": req.reason or "Manual kill switch from UI",
|
| 1337 |
"activated_at": datetime.now().isoformat(),
|
|
|
|
| 1338 |
}))
|
| 1339 |
+
return {"success": True, "action": "kill", "timestamp": datetime.now().isoformat()}
|
| 1340 |
|
| 1341 |
elif req.action == "unkill":
|
| 1342 |
try:
|
| 1343 |
kill_file.unlink(missing_ok=True)
|
| 1344 |
except Exception:
|
| 1345 |
pass
|
| 1346 |
+
return {"success": True, "action": "unkill", "timestamp": datetime.now().isoformat()}
|
| 1347 |
|
| 1348 |
elif req.action == "scan":
|
| 1349 |
+
# Trigger scan in background (supports both BIST and US)
|
| 1350 |
global _scan_thread, _scan_status
|
| 1351 |
import threading
|
| 1352 |
|
| 1353 |
if _scan_status.get("running"):
|
| 1354 |
return {"success": False, "error": "Tarama zaten çalışıyor", "scanStatus": _scan_status}
|
| 1355 |
|
| 1356 |
+
# Determine market and universe
|
| 1357 |
+
market_id = (req.market or "bist").strip().lower()
|
| 1358 |
+
if market_id == "us":
|
| 1359 |
+
universe = (req.universe or req.reason or "sp100").strip().lower()
|
| 1360 |
+
else:
|
| 1361 |
+
universe = (req.universe or req.reason or "bist30").strip().lower()
|
| 1362 |
force = bool(req.force) if req.force is not None else False
|
| 1363 |
+
title = "BIST" if market_id == "bist" else market_id.upper()
|
| 1364 |
|
| 1365 |
def _run_scan():
|
| 1366 |
global _scan_status
|
| 1367 |
try:
|
|
|
|
| 1368 |
_scan_status = {
|
| 1369 |
"running": True,
|
| 1370 |
+
"progress": f"{universe.upper()} ({title}) taraması başlatılıyor...",
|
| 1371 |
"started_at": datetime.now().isoformat(),
|
| 1372 |
"universe": universe,
|
| 1373 |
+
"market_id": market_id,
|
| 1374 |
}
|
| 1375 |
+
from trading.scanner_engine import ScanConfig, run_scan as _engine_scan
|
| 1376 |
+
import logging as _logging
|
| 1377 |
+
_scan_logger = _logging.getLogger(f"manual-scan-{market_id}")
|
| 1378 |
+
config = ScanConfig(market_id=market_id, universe_name=universe, title=title)
|
| 1379 |
+
_scan_status["progress"] = f"Stage 1: Likidite filtresi ({title})..."
|
| 1380 |
+
_engine_scan(config, _scan_logger, force=force, stage1_only=False)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1381 |
_scan_status = {
|
| 1382 |
"running": False,
|
| 1383 |
+
"progress": f"Tarama tamamlandı ({title})!",
|
| 1384 |
"finished_at": datetime.now().isoformat(),
|
| 1385 |
"universe": universe,
|
| 1386 |
+
"market_id": market_id,
|
| 1387 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1388 |
except Exception as e:
|
| 1389 |
_scan_status = {
|
| 1390 |
"running": False,
|
| 1391 |
+
"progress": f"Tarama hatası ({title}): {e}",
|
| 1392 |
"error": str(e),
|
| 1393 |
"universe": universe,
|
| 1394 |
+
"market_id": market_id,
|
| 1395 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1396 |
|
| 1397 |
+
_scan_thread = threading.Thread(target=_run_scan, daemon=True, name=f"{market_id}-scan")
|
| 1398 |
_scan_thread.start()
|
| 1399 |
return {
|
| 1400 |
"success": True,
|
| 1401 |
+
"message": f"{universe.upper()} ({title}) taraması arka planda başlatıldı",
|
| 1402 |
"scanStatus": _scan_status,
|
| 1403 |
}
|
| 1404 |
|
|
|
|
| 1411 |
|
| 1412 |
class TradingSyncPayload(BaseModel):
|
| 1413 |
sync_key: str
|
|
|
|
| 1414 |
state: Optional[dict] = None
|
| 1415 |
status: Optional[dict] = None
|
| 1416 |
trades: Optional[list] = None
|
| 1417 |
equity_curve: Optional[list] = None
|
| 1418 |
signals: Optional[list] = None
|
| 1419 |
scan_results: Optional[dict] = None
|
| 1420 |
+
market_id: Optional[str] = None
|
| 1421 |
|
| 1422 |
|
| 1423 |
@app.post("/api/trading/sync")
|
|
|
|
| 1428 |
raise HTTPException(status_code=403, detail="Invalid sync key")
|
| 1429 |
|
| 1430 |
paper_dir = Path("paper_trading")
|
| 1431 |
+
auto_dir = paper_dir / "auto_trader"
|
| 1432 |
+
journal_dir = paper_dir / "journal"
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1433 |
paper_dir.mkdir(parents=True, exist_ok=True)
|
|
|
|
| 1434 |
auto_dir.mkdir(parents=True, exist_ok=True)
|
| 1435 |
journal_dir.mkdir(parents=True, exist_ok=True)
|
| 1436 |
|
|
|
|
| 1464 |
_atomic_write(journal_dir / "signals_log.jsonl", "\n".join(lines) + "\n" if lines else "")
|
| 1465 |
synced.append(f"signals({len(payload.signals)})")
|
| 1466 |
|
| 1467 |
+
# 6. scan_results — write to the correct market-specific path
|
| 1468 |
+
sync_market = (payload.market_id or "bist").strip().lower()
|
| 1469 |
if payload.scan_results is not None:
|
| 1470 |
+
from trading.market_registry import get_scan_results_path
|
| 1471 |
+
scan_path = get_scan_results_path(sync_market, completed=True)
|
| 1472 |
+
scan_path.parent.mkdir(parents=True, exist_ok=True)
|
| 1473 |
+
_atomic_write(scan_path, json.dumps(payload.scan_results, indent=2, default=str))
|
| 1474 |
+
synced.append(f"scan_results({sync_market})")
|
| 1475 |
|
| 1476 |
# 7. Update in-memory cache (survives ephemeral filesystem resets)
|
| 1477 |
global _remote_cache
|
| 1478 |
+
scan_cache_key = f"scan_results:{sync_market}"
|
| 1479 |
+
_remote_cache["state"] = payload.state or _remote_cache.get("state")
|
| 1480 |
+
_remote_cache["status"] = payload.status or _remote_cache.get("status")
|
| 1481 |
+
if payload.trades is not None:
|
| 1482 |
+
_remote_cache["trades"] = payload.trades
|
| 1483 |
+
if payload.equity_curve is not None:
|
| 1484 |
+
_remote_cache["equity_curve"] = payload.equity_curve
|
| 1485 |
+
if payload.signals is not None:
|
| 1486 |
+
_remote_cache["signals"] = payload.signals
|
| 1487 |
+
if payload.scan_results:
|
| 1488 |
+
_remote_cache[scan_cache_key] = payload.scan_results
|
| 1489 |
+
_remote_cache["updated_at"] = datetime.now().isoformat()
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1490 |
|
| 1491 |
return {
|
| 1492 |
"success": True,
|
|
|
|
| 1503 |
|
| 1504 |
|
| 1505 |
@app.get("/api/eligible")
|
| 1506 |
+
def get_eligible_stocks(market: str = Query("bist")):
|
| 1507 |
"""
|
| 1508 |
+
GET /api/eligible?market=bist|us
|
| 1509 |
+
Returns list of eligible stocks from scan results
|
| 1510 |
"""
|
| 1511 |
+
from trading.market_registry import get_scan_results_path
|
|
|
|
|
|
|
| 1512 |
|
| 1513 |
+
market_id = market.strip().lower() if market else "bist"
|
| 1514 |
+
final_scan_file = get_scan_results_path(market_id, completed=True)
|
| 1515 |
+
work_scan_file = get_scan_results_path(market_id, completed=False)
|
| 1516 |
|
| 1517 |
+
def _timestamp_of(value):
|
| 1518 |
+
try:
|
| 1519 |
+
return datetime.fromisoformat(str(value).replace("Z", "+00:00")).timestamp()
|
| 1520 |
+
except Exception:
|
| 1521 |
+
return 0.0
|
| 1522 |
|
| 1523 |
+
if not final_scan_file.exists() and not work_scan_file.exists():
|
| 1524 |
return {
|
| 1525 |
"ok": False,
|
| 1526 |
+
"error": f"Scan results not found for {market_id}. Run scan first.",
|
| 1527 |
+
"scanRunning": _scan_status.get("running", False),
|
| 1528 |
+
"scanProgress": _scan_status.get("progress", ""),
|
| 1529 |
"scanCompleted": False,
|
| 1530 |
+
"universe": _scan_status.get("universe"),
|
|
|
|
| 1531 |
"totalStocks": None,
|
| 1532 |
"stage1Done": None,
|
| 1533 |
"stage2Done": None,
|
|
|
|
| 1547 |
}
|
| 1548 |
|
| 1549 |
try:
|
| 1550 |
+
final_data = None
|
| 1551 |
+
work_data = None
|
| 1552 |
+
|
| 1553 |
+
if final_scan_file.exists():
|
| 1554 |
+
with open(final_scan_file, "r", encoding="utf-8") as f:
|
| 1555 |
+
final_data = json.load(f)
|
| 1556 |
+
|
| 1557 |
+
if work_scan_file.exists():
|
| 1558 |
+
with open(work_scan_file, "r", encoding="utf-8") as f:
|
| 1559 |
+
work_data = json.load(f)
|
| 1560 |
+
|
| 1561 |
+
final_ts = max(
|
| 1562 |
+
_timestamp_of((final_data or {}).get("updated_at")),
|
| 1563 |
+
_timestamp_of((final_data or {}).get("scan_finished")),
|
| 1564 |
+
_timestamp_of((final_data or {}).get("scan_started")),
|
| 1565 |
+
)
|
| 1566 |
+
work_ts = max(
|
| 1567 |
+
_timestamp_of((work_data or {}).get("updated_at")),
|
| 1568 |
+
_timestamp_of((work_data or {}).get("scan_finished")),
|
| 1569 |
+
_timestamp_of((work_data or {}).get("scan_started")),
|
| 1570 |
+
)
|
| 1571 |
+
|
| 1572 |
+
using_work_file = bool(work_data) and (
|
| 1573 |
+
not final_data
|
| 1574 |
+
or not work_data.get("completed", False)
|
| 1575 |
+
or work_ts >= final_ts
|
| 1576 |
+
)
|
| 1577 |
+
data = work_data if using_work_file else final_data
|
| 1578 |
+
if data is None:
|
| 1579 |
+
raise RuntimeError("Scan payload is empty")
|
| 1580 |
|
| 1581 |
completed = bool(data.get("completed", False))
|
| 1582 |
universe = data.get("universe")
|
|
|
|
| 1621 |
stage1_total = 0
|
| 1622 |
|
| 1623 |
# Be explicit: stage2 empty = scan running, or stage1 filtered everything.
|
| 1624 |
+
base_error = f"Scan results not ready yet for {market_id}. Run scan first."
|
| 1625 |
+
if not _scan_status.get("running", False) and stage1_passed_count == 0 and (stage1_failures or stage1):
|
| 1626 |
+
base_error = f"Stage 2 empty ({market_id}): Stage 1 filtered all stocks (no symbols passed)."
|
| 1627 |
+
|
| 1628 |
+
scan_running = bool(_scan_status.get("running", False) or using_work_file or (not completed and stage1_total > 0))
|
| 1629 |
+
live_progress = str(_scan_status.get("progress", "") or "")
|
| 1630 |
+
if scan_running and live_progress.startswith("Stage 2:"):
|
| 1631 |
+
scan_progress = live_progress
|
| 1632 |
+
elif scan_running and stage1_total > 0:
|
| 1633 |
+
failed_count = len(stage1_failures)
|
| 1634 |
+
total_for_msg = stage1_total or total_stocks or "?"
|
| 1635 |
+
scan_progress = (
|
| 1636 |
+
f"Stage 1 tamamlandı: {stage1_passed_count}/{total_for_msg} ge\u00e7ti, "
|
| 1637 |
+
f"{failed_count} elendi. Stage 2 ba\u015flamak \u00fczere."
|
| 1638 |
+
)
|
| 1639 |
+
else:
|
| 1640 |
+
scan_progress = live_progress or f"Stage 1: {stage1_passed_count}/{stage1_total or total_stocks or '?'} ge\u00e7ti"
|
| 1641 |
|
| 1642 |
return {
|
| 1643 |
"ok": False,
|
|
|
|
| 1663 |
"avgHitRate": 0,
|
| 1664 |
},
|
| 1665 |
"timestamp": data.get("scan_finished") or data.get("updated_at") or data.get("scan_started"),
|
|
|
|
| 1666 |
}
|
| 1667 |
|
| 1668 |
eligible = []
|
|
|
|
| 1741 |
avg_hit_rate = sum(e.get("hit_rate", 0) for e in eligible) / len(eligible) if eligible else 0
|
| 1742 |
|
| 1743 |
# Resolve scan progress for ok:true (stage2 has data)
|
| 1744 |
+
scan_running_flag = bool(_scan_status.get("running", False) or using_work_file or (not completed and stage2_done > 0))
|
| 1745 |
if not completed:
|
| 1746 |
+
excluded_count = len(excluded)
|
| 1747 |
+
eligible_count = len(eligible)
|
| 1748 |
+
total_stage2 = stage1_passed_count or '?'
|
| 1749 |
+
scan_progress_str = (
|
| 1750 |
+
f"Stage 2 devam ediyor: {stage2_done}/{total_stage2} tamamlandı. "
|
| 1751 |
+
f"Eligible: {eligible_count}, Elenen: {excluded_count}."
|
| 1752 |
+
)
|
| 1753 |
else:
|
| 1754 |
+
scan_progress_str = _scan_status.get("progress", "") or "Tarama tamamlandı"
|
| 1755 |
|
| 1756 |
return {
|
| 1757 |
"ok": True,
|
|
|
|
| 1759 |
"scanProgress": scan_progress_str,
|
| 1760 |
"scanCompleted": completed,
|
| 1761 |
"universe": universe,
|
|
|
|
| 1762 |
"totalStocks": total_stocks,
|
| 1763 |
"stage1Done": stage1_passed_count + len(stage1_failures),
|
| 1764 |
"stage2Done": stage2_done,
|
|
|
|
| 1782 |
return {
|
| 1783 |
"ok": False,
|
| 1784 |
"error": str(e),
|
| 1785 |
+
"scanRunning": _scan_status.get("running", False),
|
| 1786 |
+
"scanProgress": _scan_status.get("progress", ""),
|
|
|
|
| 1787 |
}
|
| 1788 |
|
| 1789 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1790 |
# For Hugging Face Spaces
|
| 1791 |
if __name__ == "__main__":
|
| 1792 |
import uvicorn
|
huggingface-space/data/index_constituents.py
CHANGED
|
@@ -57,7 +57,7 @@ def _normalize_universe_name(name: str) -> str:
|
|
| 57 |
return "bist50"
|
| 58 |
if key in ("bist 30", "bist30", "xu030", "xu30", "xu 30", "xu-30"):
|
| 59 |
return "bist30"
|
| 60 |
-
if key in ("all", "tum", "tüm", "tum bist", "tüm bist", "hepsi"):
|
| 61 |
return "all"
|
| 62 |
return key
|
| 63 |
|
|
|
|
| 57 |
return "bist50"
|
| 58 |
if key in ("bist 30", "bist30", "xu030", "xu30", "xu 30", "xu-30"):
|
| 59 |
return "bist30"
|
| 60 |
+
if key in ("all", "bist_all", "bistall", "tum", "tüm", "tum bist", "tüm bist", "hepsi"):
|
| 61 |
return "all"
|
| 62 |
return key
|
| 63 |
|
huggingface-space/nextjs-app/package.json
CHANGED
|
@@ -6,8 +6,7 @@
|
|
| 6 |
"dev": "next dev -p 3001",
|
| 7 |
"build": "next build",
|
| 8 |
"start": "next start",
|
| 9 |
-
"lint": "next lint"
|
| 10 |
-
"smoke:trading-auth": "node ./scripts/trading-auth-smoke.mjs"
|
| 11 |
},
|
| 12 |
"dependencies": {
|
| 13 |
"@supabase/ssr": "^0.8.0",
|
|
|
|
| 6 |
"dev": "next dev -p 3001",
|
| 7 |
"build": "next build",
|
| 8 |
"start": "next start",
|
| 9 |
+
"lint": "next lint"
|
|
|
|
| 10 |
},
|
| 11 |
"dependencies": {
|
| 12 |
"@supabase/ssr": "^0.8.0",
|
huggingface-space/nextjs-app/scripts/trading-auth-smoke.mjs
DELETED
|
@@ -1,250 +0,0 @@
|
|
| 1 |
-
import { createBrowserClient } from '@supabase/ssr'
|
| 2 |
-
import crypto from 'node:crypto'
|
| 3 |
-
import fs from 'node:fs'
|
| 4 |
-
import path from 'node:path'
|
| 5 |
-
|
| 6 |
-
function loadEnvFile(filePath) {
|
| 7 |
-
if (!fs.existsSync(filePath)) return
|
| 8 |
-
const content = fs.readFileSync(filePath, 'utf8')
|
| 9 |
-
for (const rawLine of content.split(/\r?\n/)) {
|
| 10 |
-
const line = rawLine.trim()
|
| 11 |
-
if (!line || line.startsWith('#')) continue
|
| 12 |
-
const separatorIndex = line.indexOf('=')
|
| 13 |
-
if (separatorIndex === -1) continue
|
| 14 |
-
const key = line.slice(0, separatorIndex).trim()
|
| 15 |
-
const value = line.slice(separatorIndex + 1).trim()
|
| 16 |
-
if (!(key in process.env)) {
|
| 17 |
-
process.env[key] = value
|
| 18 |
-
}
|
| 19 |
-
}
|
| 20 |
-
}
|
| 21 |
-
|
| 22 |
-
function getEnv(name) {
|
| 23 |
-
const value = process.env[name]
|
| 24 |
-
if (!value) {
|
| 25 |
-
throw new Error(`Missing required environment variable: ${name}`)
|
| 26 |
-
}
|
| 27 |
-
return value
|
| 28 |
-
}
|
| 29 |
-
|
| 30 |
-
function parseArgs(argv) {
|
| 31 |
-
const options = {
|
| 32 |
-
baseUrl: process.env.SMOKE_BASE_URL || 'http://127.0.0.1:3012',
|
| 33 |
-
market: process.env.SMOKE_MARKET || 'us',
|
| 34 |
-
reason: 'trading-auth-smoke',
|
| 35 |
-
}
|
| 36 |
-
|
| 37 |
-
for (let index = 0; index < argv.length; index += 1) {
|
| 38 |
-
const arg = argv[index]
|
| 39 |
-
if (arg === '--base-url') options.baseUrl = argv[index + 1]
|
| 40 |
-
if (arg === '--market') options.market = argv[index + 1]
|
| 41 |
-
}
|
| 42 |
-
|
| 43 |
-
return options
|
| 44 |
-
}
|
| 45 |
-
|
| 46 |
-
function summarizeTradingPayload(payload, status) {
|
| 47 |
-
return {
|
| 48 |
-
status,
|
| 49 |
-
hasStatus: Object.prototype.hasOwnProperty.call(payload, 'status'),
|
| 50 |
-
hasPortfolio: Object.prototype.hasOwnProperty.call(payload, 'portfolio'),
|
| 51 |
-
hasMarkets: Boolean(payload?.markets && typeof payload.markets === 'object' && !Array.isArray(payload.markets)),
|
| 52 |
-
marketKeys: payload?.markets ? Object.keys(payload.markets).sort() : [],
|
| 53 |
-
activeMarketId: payload?.activeMarketId ?? null,
|
| 54 |
-
bistHasStatus: Boolean(payload?.markets?.bist?.status),
|
| 55 |
-
usHasStatus: Boolean(payload?.markets?.us?.status),
|
| 56 |
-
}
|
| 57 |
-
}
|
| 58 |
-
|
| 59 |
-
async function requestJson(url, options = {}) {
|
| 60 |
-
const response = await fetch(url, options)
|
| 61 |
-
const text = await response.text()
|
| 62 |
-
let payload
|
| 63 |
-
try {
|
| 64 |
-
payload = JSON.parse(text)
|
| 65 |
-
} catch {
|
| 66 |
-
payload = { raw: text }
|
| 67 |
-
}
|
| 68 |
-
return { response, payload }
|
| 69 |
-
}
|
| 70 |
-
|
| 71 |
-
async function assertOkResponse(name, result, expectedStatus = 200) {
|
| 72 |
-
if (result.response.status !== expectedStatus) {
|
| 73 |
-
throw new Error(`${name} failed with ${result.response.status}: ${JSON.stringify(result.payload)}`)
|
| 74 |
-
}
|
| 75 |
-
}
|
| 76 |
-
|
| 77 |
-
async function main() {
|
| 78 |
-
const appDir = process.cwd()
|
| 79 |
-
loadEnvFile(path.join(appDir, '.env.local'))
|
| 80 |
-
|
| 81 |
-
const { baseUrl, market, reason } = parseArgs(process.argv.slice(2))
|
| 82 |
-
const supabaseUrl = getEnv('NEXT_PUBLIC_SUPABASE_URL')
|
| 83 |
-
const supabaseAnonKey = getEnv('NEXT_PUBLIC_SUPABASE_ANON_KEY')
|
| 84 |
-
const serviceRoleKey = getEnv('SUPABASE_SERVICE_ROLE_KEY')
|
| 85 |
-
|
| 86 |
-
const cookieJar = new Map()
|
| 87 |
-
const email = `copilot-smoke-${crypto.randomBytes(6).toString('hex')}@example.com`
|
| 88 |
-
const password = `Tmp-${crypto.randomBytes(12).toString('base64url')}A1!`
|
| 89 |
-
let createdUserId = null
|
| 90 |
-
let accessToken = null
|
| 91 |
-
let cookieHeader = ''
|
| 92 |
-
|
| 93 |
-
const getCookieHeader = () => [...cookieJar.entries()].map(([name, value]) => `${name}=${value}`).join('; ')
|
| 94 |
-
|
| 95 |
-
try {
|
| 96 |
-
const createUser = await requestJson(`${supabaseUrl}/auth/v1/admin/users`, {
|
| 97 |
-
method: 'POST',
|
| 98 |
-
headers: {
|
| 99 |
-
apikey: serviceRoleKey,
|
| 100 |
-
Authorization: `Bearer ${serviceRoleKey}`,
|
| 101 |
-
'Content-Type': 'application/json',
|
| 102 |
-
},
|
| 103 |
-
body: JSON.stringify({ email, password, email_confirm: true }),
|
| 104 |
-
})
|
| 105 |
-
await assertOkResponse('createUser', createUser)
|
| 106 |
-
createdUserId = createUser.payload.id
|
| 107 |
-
|
| 108 |
-
const client = createBrowserClient(supabaseUrl, supabaseAnonKey, {
|
| 109 |
-
cookies: {
|
| 110 |
-
getAll() {
|
| 111 |
-
return [...cookieJar.entries()].map(([name, value]) => ({ name, value }))
|
| 112 |
-
},
|
| 113 |
-
setAll(cookiesToSet) {
|
| 114 |
-
for (const cookie of cookiesToSet) {
|
| 115 |
-
if (cookie.value) cookieJar.set(cookie.name, cookie.value)
|
| 116 |
-
else cookieJar.delete(cookie.name)
|
| 117 |
-
}
|
| 118 |
-
},
|
| 119 |
-
},
|
| 120 |
-
})
|
| 121 |
-
|
| 122 |
-
const signIn = await client.auth.signInWithPassword({ email, password })
|
| 123 |
-
if (signIn.error || !signIn.data.session) {
|
| 124 |
-
throw new Error(`signInWithPassword failed: ${signIn.error?.message || 'No session returned'}`)
|
| 125 |
-
}
|
| 126 |
-
|
| 127 |
-
accessToken = signIn.data.session.access_token
|
| 128 |
-
cookieHeader = getCookieHeader()
|
| 129 |
-
|
| 130 |
-
const getCookie = await requestJson(`${baseUrl}/api/trading`, {
|
| 131 |
-
headers: { Cookie: cookieHeader },
|
| 132 |
-
})
|
| 133 |
-
await assertOkResponse('getCookie', getCookie)
|
| 134 |
-
|
| 135 |
-
const getBearer = await requestJson(`${baseUrl}/api/trading`, {
|
| 136 |
-
headers: { Authorization: `Bearer ${accessToken}` },
|
| 137 |
-
})
|
| 138 |
-
await assertOkResponse('getBearer', getBearer)
|
| 139 |
-
|
| 140 |
-
const postBody = JSON.stringify({ action: 'kill', market, reason })
|
| 141 |
-
const postCookie = await requestJson(`${baseUrl}/api/trading`, {
|
| 142 |
-
method: 'POST',
|
| 143 |
-
headers: {
|
| 144 |
-
'Content-Type': 'application/json',
|
| 145 |
-
Cookie: cookieHeader,
|
| 146 |
-
},
|
| 147 |
-
body: postBody,
|
| 148 |
-
})
|
| 149 |
-
await assertOkResponse('postCookieKill', postCookie)
|
| 150 |
-
|
| 151 |
-
const postBearer = await requestJson(`${baseUrl}/api/trading`, {
|
| 152 |
-
method: 'POST',
|
| 153 |
-
headers: {
|
| 154 |
-
'Content-Type': 'application/json',
|
| 155 |
-
Authorization: `Bearer ${accessToken}`,
|
| 156 |
-
},
|
| 157 |
-
body: postBody,
|
| 158 |
-
})
|
| 159 |
-
await assertOkResponse('postBearerKill', postBearer)
|
| 160 |
-
|
| 161 |
-
const cleanupBody = JSON.stringify({ action: 'unkill', market })
|
| 162 |
-
const cleanupCookie = await requestJson(`${baseUrl}/api/trading`, {
|
| 163 |
-
method: 'POST',
|
| 164 |
-
headers: {
|
| 165 |
-
'Content-Type': 'application/json',
|
| 166 |
-
Cookie: cookieHeader,
|
| 167 |
-
},
|
| 168 |
-
body: cleanupBody,
|
| 169 |
-
})
|
| 170 |
-
await assertOkResponse('postCookieUnkill', cleanupCookie)
|
| 171 |
-
|
| 172 |
-
const cleanupBearer = await requestJson(`${baseUrl}/api/trading`, {
|
| 173 |
-
method: 'POST',
|
| 174 |
-
headers: {
|
| 175 |
-
'Content-Type': 'application/json',
|
| 176 |
-
Authorization: `Bearer ${accessToken}`,
|
| 177 |
-
},
|
| 178 |
-
body: cleanupBody,
|
| 179 |
-
})
|
| 180 |
-
await assertOkResponse('postBearerUnkill', cleanupBearer)
|
| 181 |
-
|
| 182 |
-
accessToken = null
|
| 183 |
-
cookieHeader = ''
|
| 184 |
-
|
| 185 |
-
console.log(JSON.stringify({
|
| 186 |
-
baseUrl,
|
| 187 |
-
market,
|
| 188 |
-
cookieNames: [...cookieJar.keys()].sort(),
|
| 189 |
-
getCookie: summarizeTradingPayload(getCookie.payload, getCookie.response.status),
|
| 190 |
-
getBearer: summarizeTradingPayload(getBearer.payload, getBearer.response.status),
|
| 191 |
-
postCookie: {
|
| 192 |
-
status: postCookie.response.status,
|
| 193 |
-
success: postCookie.payload?.success ?? false,
|
| 194 |
-
action: postCookie.payload?.action ?? null,
|
| 195 |
-
},
|
| 196 |
-
postBearer: {
|
| 197 |
-
status: postBearer.response.status,
|
| 198 |
-
success: postBearer.payload?.success ?? false,
|
| 199 |
-
action: postBearer.payload?.action ?? null,
|
| 200 |
-
},
|
| 201 |
-
cleanupCookie: {
|
| 202 |
-
status: cleanupCookie.response.status,
|
| 203 |
-
success: cleanupCookie.payload?.success ?? false,
|
| 204 |
-
action: cleanupCookie.payload?.action ?? null,
|
| 205 |
-
},
|
| 206 |
-
cleanupBearer: {
|
| 207 |
-
status: cleanupBearer.response.status,
|
| 208 |
-
success: cleanupBearer.payload?.success ?? false,
|
| 209 |
-
action: cleanupBearer.payload?.action ?? null,
|
| 210 |
-
},
|
| 211 |
-
}))
|
| 212 |
-
} finally {
|
| 213 |
-
if (cookieHeader) {
|
| 214 |
-
await requestJson(`${baseUrl}/api/trading`, {
|
| 215 |
-
method: 'POST',
|
| 216 |
-
headers: {
|
| 217 |
-
'Content-Type': 'application/json',
|
| 218 |
-
Cookie: cookieHeader,
|
| 219 |
-
},
|
| 220 |
-
body: JSON.stringify({ action: 'unkill', market }),
|
| 221 |
-
}).catch(() => undefined)
|
| 222 |
-
}
|
| 223 |
-
|
| 224 |
-
if (accessToken) {
|
| 225 |
-
await requestJson(`${baseUrl}/api/trading`, {
|
| 226 |
-
method: 'POST',
|
| 227 |
-
headers: {
|
| 228 |
-
'Content-Type': 'application/json',
|
| 229 |
-
Authorization: `Bearer ${accessToken}`,
|
| 230 |
-
},
|
| 231 |
-
body: JSON.stringify({ action: 'unkill', market }),
|
| 232 |
-
}).catch(() => undefined)
|
| 233 |
-
}
|
| 234 |
-
|
| 235 |
-
if (createdUserId) {
|
| 236 |
-
await fetch(`${supabaseUrl}/auth/v1/admin/users/${createdUserId}`, {
|
| 237 |
-
method: 'DELETE',
|
| 238 |
-
headers: {
|
| 239 |
-
apikey: serviceRoleKey,
|
| 240 |
-
Authorization: `Bearer ${serviceRoleKey}`,
|
| 241 |
-
},
|
| 242 |
-
}).catch(() => undefined)
|
| 243 |
-
}
|
| 244 |
-
}
|
| 245 |
-
}
|
| 246 |
-
|
| 247 |
-
main().catch((error) => {
|
| 248 |
-
console.error(error instanceof Error ? error.message : String(error))
|
| 249 |
-
process.exitCode = 1
|
| 250 |
-
})
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
huggingface-space/nextjs-app/src/app/ai-analysis/page.tsx
CHANGED
|
@@ -6,7 +6,6 @@ import Link from 'next/link';
|
|
| 6 |
|
| 7 |
import { fetchJson } from '@/lib/http';
|
| 8 |
import { logger } from '@/lib/logger';
|
| 9 |
-
import { useMarket } from '@/contexts/MarketContext';
|
| 10 |
|
| 11 |
type TabType = 'market' | 'stock';
|
| 12 |
|
|
@@ -35,21 +34,16 @@ interface AnalysisResult {
|
|
| 35 |
}
|
| 36 |
|
| 37 |
export default function AIAnalysisPage() {
|
| 38 |
-
const { market } = useMarket();
|
| 39 |
-
const isUS = market === 'us';
|
| 40 |
-
const currencySymbol = isUS ? '$' : '₺';
|
| 41 |
const [activeTab, setActiveTab] = useState<TabType>('stock');
|
| 42 |
const [symbol, setSymbol] = useState('');
|
| 43 |
const [analysis, setAnalysis] = useState<AnalysisResult | null>(null);
|
| 44 |
const [loading, setLoading] = useState(false);
|
| 45 |
const [marketOverview, setMarketOverview] = useState<Record<string, unknown> | null>(null);
|
| 46 |
const [quickSymbols, setQuickSymbols] = useState<string[]>([]);
|
| 47 |
-
const t = (tr: string, en: string) => isUS ? en : tr;
|
| 48 |
|
| 49 |
useEffect(() => {
|
| 50 |
let mounted = true;
|
| 51 |
-
|
| 52 |
-
fetchJson<Record<string, unknown>>(`/api/universe?name=${universeName}`, { method: 'GET' }, { timeoutMs: 20000, retries: 1 })
|
| 53 |
.then((data) => {
|
| 54 |
const symbols = Array.isArray(data?.symbols) ? data.symbols : []
|
| 55 |
if (mounted) setQuickSymbols(symbols.slice(0, 12))
|
|
@@ -60,15 +54,15 @@ export default function AIAnalysisPage() {
|
|
| 60 |
return () => {
|
| 61 |
mounted = false
|
| 62 |
}
|
| 63 |
-
}, [
|
| 64 |
|
| 65 |
const handleMarketAnalysis = async () => {
|
| 66 |
setLoading(true);
|
| 67 |
try {
|
| 68 |
-
const data = await fetchJson<Record<string, unknown>>(`/api/market-overview
|
| 69 |
setMarketOverview(data)
|
| 70 |
} catch (error) {
|
| 71 |
-
console.error(
|
| 72 |
setMarketOverview(null)
|
| 73 |
} finally {
|
| 74 |
setLoading(false);
|
|
@@ -84,12 +78,12 @@ export default function AIAnalysisPage() {
|
|
| 84 |
|
| 85 |
const [stockRaw, techRaw, mlRaw] = await Promise.all([
|
| 86 |
fetchJson<Record<string, unknown>>(
|
| 87 |
-
`/api/stock-data?symbol=${encodeURIComponent(sym)}&period=6mo&interval=1d
|
| 88 |
{ method: 'GET' },
|
| 89 |
{ timeoutMs: 20000, retries: 1 }
|
| 90 |
),
|
| 91 |
fetchJson<Record<string, unknown>>(
|
| 92 |
-
`/api/technical-analysis?symbol=${encodeURIComponent(sym)}
|
| 93 |
{ method: 'GET' },
|
| 94 |
{ timeoutMs: 20000, retries: 1 }
|
| 95 |
).catch(() => null),
|
|
@@ -99,7 +93,7 @@ export default function AIAnalysisPage() {
|
|
| 99 |
{
|
| 100 |
timeoutMs: 30000,
|
| 101 |
retries: 0,
|
| 102 |
-
jsonBody: { symbols: [sym], days_ahead: 7, model: 'ensemble'
|
| 103 |
}
|
| 104 |
).catch(() => null),
|
| 105 |
]);
|
|
@@ -109,30 +103,27 @@ export default function AIAnalysisPage() {
|
|
| 109 |
let techSignal: 'BUY' | 'SELL' | 'HOLD' = 'HOLD'
|
| 110 |
try {
|
| 111 |
const scanResp = await fetchJson<Record<string, unknown>>(
|
| 112 |
-
|
| 113 |
{ method: 'POST' },
|
| 114 |
{
|
| 115 |
timeoutMs: 30000,
|
| 116 |
retries: 0,
|
| 117 |
jsonBody: {
|
| 118 |
symbols: [sym],
|
| 119 |
-
|
|
|
|
|
|
|
| 120 |
},
|
| 121 |
}
|
| 122 |
)
|
| 123 |
|
| 124 |
-
const items
|
| 125 |
-
? ((isUS ? scanResp.signals : scanResp.data) as Record<string, unknown>[])
|
| 126 |
-
: []
|
| 127 |
const item = items.find((x: Record<string, unknown>) => String(x?.symbol || '').toUpperCase() === sym)
|
| 128 |
-
|
| 129 |
-
|
| 130 |
-
requiredOk = String(item?.signal || '').toUpperCase() === 'BUY'
|
| 131 |
-
} else if (gates && typeof gates.required_ok === 'boolean') {
|
| 132 |
-
requiredOk = gates.required_ok as boolean
|
| 133 |
}
|
| 134 |
-
if (item?.technical_signal
|
| 135 |
-
const ts = String(item.technical_signal
|
| 136 |
if (ts === 'BUY' || ts === 'SELL') techSignal = ts
|
| 137 |
}
|
| 138 |
} catch (e) {
|
|
@@ -211,8 +202,8 @@ export default function AIAnalysisPage() {
|
|
| 211 |
};
|
| 212 |
|
| 213 |
const tabs = [
|
| 214 |
-
{ id: 'market', label:
|
| 215 |
-
{ id: 'stock', label:
|
| 216 |
];
|
| 217 |
|
| 218 |
return (
|
|
@@ -222,10 +213,10 @@ export default function AIAnalysisPage() {
|
|
| 222 |
<div className="mb-6">
|
| 223 |
<div className="flex items-center gap-3 mb-2">
|
| 224 |
<Brain className="w-8 h-8 text-purple-600" />
|
| 225 |
-
<h1 className="text-3xl font-bold text-gray-900">
|
| 226 |
</div>
|
| 227 |
<p className="text-gray-600">
|
| 228 |
-
|
| 229 |
</p>
|
| 230 |
</div>
|
| 231 |
|
|
@@ -256,9 +247,9 @@ export default function AIAnalysisPage() {
|
|
| 256 |
{activeTab === 'market' && (
|
| 257 |
<div className="space-y-6">
|
| 258 |
<div className="bg-white p-6 rounded-lg shadow-sm">
|
| 259 |
-
<h2 className="text-xl font-bold text-gray-900 mb-4">
|
| 260 |
<p className="text-gray-600 mb-4">
|
| 261 |
-
|
| 262 |
</p>
|
| 263 |
<button
|
| 264 |
onClick={handleMarketAnalysis}
|
|
@@ -266,7 +257,7 @@ export default function AIAnalysisPage() {
|
|
| 266 |
className="bg-purple-600 text-white px-6 py-3 rounded-md hover:bg-purple-700 disabled:opacity-50 font-semibold flex items-center gap-2"
|
| 267 |
>
|
| 268 |
<Activity className="w-5 h-5" />
|
| 269 |
-
{loading ?
|
| 270 |
</button>
|
| 271 |
</div>
|
| 272 |
|
|
@@ -275,17 +266,17 @@ export default function AIAnalysisPage() {
|
|
| 275 |
<h3 className="font-semibold text-gray-900 mb-3">Market Overview</h3>
|
| 276 |
<div className="grid grid-cols-1 md:grid-cols-3 gap-4">
|
| 277 |
<div className="p-4 bg-gray-50 rounded-lg">
|
| 278 |
-
<p className="text-sm text-gray-600">
|
| 279 |
-
<p className="text-xl font-bold text-gray-900">{String(marketOverview.index ||
|
| 280 |
</div>
|
| 281 |
<div className="p-4 bg-gray-50 rounded-lg">
|
| 282 |
-
<p className="text-sm text-gray-600">
|
| 283 |
<p className="text-xl font-bold text-gray-900">
|
| 284 |
-
{typeof marketOverview.value === 'number' ? marketOverview.value.toLocaleString(
|
| 285 |
</p>
|
| 286 |
</div>
|
| 287 |
<div className="p-4 bg-gray-50 rounded-lg">
|
| 288 |
-
<p className="text-sm text-gray-600">
|
| 289 |
<p className={`text-xl font-bold ${(Number(marketOverview.change_percent) || 0) >= 0 ? 'text-green-600' : 'text-red-600'}`}>
|
| 290 |
{typeof marketOverview.change_percent === 'number' ? `${marketOverview.change_percent >= 0 ? '+' : ''}${marketOverview.change_percent.toFixed(2)}%` : '—'}
|
| 291 |
</p>
|
|
@@ -294,7 +285,7 @@ export default function AIAnalysisPage() {
|
|
| 294 |
</div>
|
| 295 |
) : (
|
| 296 |
<div className="bg-white p-6 rounded-lg shadow-sm">
|
| 297 |
-
<p className="text-gray-600">
|
| 298 |
</div>
|
| 299 |
)}
|
| 300 |
</div>
|
|
@@ -304,9 +295,9 @@ export default function AIAnalysisPage() {
|
|
| 304 |
{activeTab === 'stock' && (
|
| 305 |
<div className="space-y-6">
|
| 306 |
<div className="bg-white p-6 rounded-lg shadow-sm">
|
| 307 |
-
<h2 className="text-xl font-bold text-gray-900 mb-4">
|
| 308 |
<p className="text-gray-600 mb-4">
|
| 309 |
-
|
| 310 |
</p>
|
| 311 |
<div className="flex gap-4">
|
| 312 |
<div className="flex-1">
|
|
@@ -314,7 +305,7 @@ export default function AIAnalysisPage() {
|
|
| 314 |
<Search className="absolute left-3 top-1/2 transform -translate-y-1/2 w-5 h-5 text-gray-400" />
|
| 315 |
<input
|
| 316 |
type="text"
|
| 317 |
-
placeholder=
|
| 318 |
value={symbol}
|
| 319 |
onChange={(e) => setSymbol(e.target.value.toUpperCase())}
|
| 320 |
onKeyPress={(e) => e.key === 'Enter' && handleStockAnalyze()}
|
|
@@ -328,7 +319,7 @@ export default function AIAnalysisPage() {
|
|
| 328 |
className="bg-purple-600 text-white px-6 py-3 rounded-md hover:bg-purple-700 disabled:opacity-50 disabled:cursor-not-allowed font-semibold flex items-center gap-2"
|
| 329 |
>
|
| 330 |
<Brain className="w-5 h-5" />
|
| 331 |
-
{loading ?
|
| 332 |
</button>
|
| 333 |
</div>
|
| 334 |
</div>
|
|
@@ -344,7 +335,7 @@ export default function AIAnalysisPage() {
|
|
| 344 |
</div>
|
| 345 |
<div className="text-right">
|
| 346 |
<p className="text-3xl font-bold text-gray-900">
|
| 347 |
-
{typeof analysis.stock.current_price === 'number' ? `${
|
| 348 |
</p>
|
| 349 |
<p className={`text-lg font-semibold ${
|
| 350 |
(analysis.stock.change_percent ?? 0) >= 0 ? 'text-green-600' : 'text-red-600'
|
|
@@ -357,16 +348,16 @@ export default function AIAnalysisPage() {
|
|
| 357 |
</div>
|
| 358 |
<div className="grid grid-cols-3 gap-4 pt-4 border-t">
|
| 359 |
<div>
|
| 360 |
-
<p className="text-sm text-gray-600">
|
| 361 |
<p className="font-semibold">{typeof analysis.stock.volume === 'number' ? `${(analysis.stock.volume / 1000000).toFixed(1)}M` : '—'}</p>
|
| 362 |
</div>
|
| 363 |
<div>
|
| 364 |
-
<p className="text-sm text-gray-600">
|
| 365 |
-
<p className="font-semibold">{typeof analysis.stock.week_52_low === 'number' ? `${
|
| 366 |
</div>
|
| 367 |
<div>
|
| 368 |
-
<p className="text-sm text-gray-600">
|
| 369 |
-
<p className="font-semibold">{typeof analysis.stock.week_52_high === 'number' ? `${
|
| 370 |
</div>
|
| 371 |
</div>
|
| 372 |
</div>
|
|
@@ -376,7 +367,7 @@ export default function AIAnalysisPage() {
|
|
| 376 |
<div className="bg-gradient-to-r from-purple-600 to-purple-800 text-white p-6 rounded-lg shadow-lg">
|
| 377 |
<div className="flex items-center gap-2 mb-4">
|
| 378 |
<Zap className="w-6 h-6" />
|
| 379 |
-
<h3 className="text-xl font-bold">
|
| 380 |
{analysis.prediction.models_used != null && (
|
| 381 |
<span className="text-xs bg-white/20 px-2 py-1 rounded">
|
| 382 |
Ensemble ({String(analysis.prediction.models_used)} Model)
|
|
@@ -385,18 +376,18 @@ export default function AIAnalysisPage() {
|
|
| 385 |
</div>
|
| 386 |
<div className="grid grid-cols-1 md:grid-cols-3 gap-4">
|
| 387 |
<div className="bg-white/10 backdrop-blur p-4 rounded-lg">
|
| 388 |
-
<p className="text-sm opacity-75 mb-1">
|
| 389 |
<p className="text-2xl font-bold">{analysis.prediction.recommendation}</p>
|
| 390 |
</div>
|
| 391 |
<div className="bg-white/10 backdrop-blur p-4 rounded-lg">
|
| 392 |
-
<p className="text-sm opacity-75 mb-1">
|
| 393 |
<p className="text-2xl font-bold">{typeof analysis.prediction.confidence === 'number' ? `${analysis.prediction.confidence.toFixed(0)}%` : '—'}</p>
|
| 394 |
</div>
|
| 395 |
<div className="bg-white/10 backdrop-blur p-4 rounded-lg">
|
| 396 |
-
<p className="text-sm opacity-75 mb-1">
|
| 397 |
<p className="text-2xl font-bold">
|
| 398 |
{analysis.prediction.predicted_price !== undefined && analysis.prediction.predicted_price !== null
|
| 399 |
-
? `${
|
| 400 |
: '—'}
|
| 401 |
</p>
|
| 402 |
</div>
|
|
@@ -404,7 +395,7 @@ export default function AIAnalysisPage() {
|
|
| 404 |
</div>
|
| 405 |
) : (
|
| 406 |
<div className="bg-white p-6 rounded-lg shadow-sm border">
|
| 407 |
-
<p className="text-gray-700">
|
| 408 |
</div>
|
| 409 |
)}
|
| 410 |
|
|
@@ -413,13 +404,13 @@ export default function AIAnalysisPage() {
|
|
| 413 |
<div className="bg-white p-6 rounded-lg shadow-sm">
|
| 414 |
<h3 className="font-bold text-gray-900 mb-4 flex items-center gap-2">
|
| 415 |
<Target className="w-5 h-5 text-purple-600" />
|
| 416 |
-
ML Model
|
| 417 |
</h3>
|
| 418 |
-
<p className="text-xs text-gray-500 mb-3">
|
| 419 |
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
|
| 420 |
{typeof analysis.prediction.trend_score === 'number' && (
|
| 421 |
<div className="p-4 bg-gray-50 rounded-lg">
|
| 422 |
-
<p className="text-sm text-gray-600">
|
| 423 |
<div className="flex items-center gap-2 mt-1">
|
| 424 |
<div className="flex-1 bg-gray-200 rounded-full h-2">
|
| 425 |
<div className="bg-purple-600 h-2 rounded-full" style={{ width: `${analysis.prediction.trend_score}%` }}></div>
|
|
@@ -449,13 +440,13 @@ export default function AIAnalysisPage() {
|
|
| 449 |
href={`/stocks/${symbol}`}
|
| 450 |
className="flex-1 bg-blue-600 text-white text-center py-3 px-6 rounded-md hover:bg-blue-700 font-semibold"
|
| 451 |
>
|
| 452 |
-
|
| 453 |
</Link>
|
| 454 |
<button
|
| 455 |
onClick={() => setAnalysis(null)}
|
| 456 |
className="px-6 py-3 border border-gray-300 rounded-md hover:bg-gray-50 font-semibold"
|
| 457 |
>
|
| 458 |
-
|
| 459 |
</button>
|
| 460 |
</div>
|
| 461 |
</div>
|
|
@@ -464,8 +455,8 @@ export default function AIAnalysisPage() {
|
|
| 464 |
{!analysis && !loading && (
|
| 465 |
<div className="bg-white rounded-lg shadow-sm p-12 text-center">
|
| 466 |
<Brain className="w-16 h-16 text-gray-400 mx-auto mb-4" />
|
| 467 |
-
<h3 className="text-lg font-semibold text-gray-900 mb-2">
|
| 468 |
-
<p className="text-gray-600 mb-6">
|
| 469 |
<div className="flex flex-wrap justify-center gap-2">
|
| 470 |
{quickSymbols.map((s) => (
|
| 471 |
<button
|
|
@@ -478,7 +469,7 @@ export default function AIAnalysisPage() {
|
|
| 478 |
))}
|
| 479 |
</div>
|
| 480 |
{quickSymbols.length === 0 && (
|
| 481 |
-
<p className="text-xs text-gray-400 mt-3">
|
| 482 |
)}
|
| 483 |
</div>
|
| 484 |
)}
|
|
|
|
| 6 |
|
| 7 |
import { fetchJson } from '@/lib/http';
|
| 8 |
import { logger } from '@/lib/logger';
|
|
|
|
| 9 |
|
| 10 |
type TabType = 'market' | 'stock';
|
| 11 |
|
|
|
|
| 34 |
}
|
| 35 |
|
| 36 |
export default function AIAnalysisPage() {
|
|
|
|
|
|
|
|
|
|
| 37 |
const [activeTab, setActiveTab] = useState<TabType>('stock');
|
| 38 |
const [symbol, setSymbol] = useState('');
|
| 39 |
const [analysis, setAnalysis] = useState<AnalysisResult | null>(null);
|
| 40 |
const [loading, setLoading] = useState(false);
|
| 41 |
const [marketOverview, setMarketOverview] = useState<Record<string, unknown> | null>(null);
|
| 42 |
const [quickSymbols, setQuickSymbols] = useState<string[]>([]);
|
|
|
|
| 43 |
|
| 44 |
useEffect(() => {
|
| 45 |
let mounted = true;
|
| 46 |
+
fetchJson<Record<string, unknown>>(`/api/universe?name=bist30`, { method: 'GET' }, { timeoutMs: 20000, retries: 1 })
|
|
|
|
| 47 |
.then((data) => {
|
| 48 |
const symbols = Array.isArray(data?.symbols) ? data.symbols : []
|
| 49 |
if (mounted) setQuickSymbols(symbols.slice(0, 12))
|
|
|
|
| 54 |
return () => {
|
| 55 |
mounted = false
|
| 56 |
}
|
| 57 |
+
}, [])
|
| 58 |
|
| 59 |
const handleMarketAnalysis = async () => {
|
| 60 |
setLoading(true);
|
| 61 |
try {
|
| 62 |
+
const data = await fetchJson<Record<string, unknown>>(`/api/market-overview`, { method: 'GET' }, { timeoutMs: 20000, retries: 1 })
|
| 63 |
setMarketOverview(data)
|
| 64 |
} catch (error) {
|
| 65 |
+
console.error('Market analysis failed:', error);
|
| 66 |
setMarketOverview(null)
|
| 67 |
} finally {
|
| 68 |
setLoading(false);
|
|
|
|
| 78 |
|
| 79 |
const [stockRaw, techRaw, mlRaw] = await Promise.all([
|
| 80 |
fetchJson<Record<string, unknown>>(
|
| 81 |
+
`/api/stock-data?symbol=${encodeURIComponent(sym)}&period=6mo&interval=1d`,
|
| 82 |
{ method: 'GET' },
|
| 83 |
{ timeoutMs: 20000, retries: 1 }
|
| 84 |
),
|
| 85 |
fetchJson<Record<string, unknown>>(
|
| 86 |
+
`/api/technical-analysis?symbol=${encodeURIComponent(sym)}`,
|
| 87 |
{ method: 'GET' },
|
| 88 |
{ timeoutMs: 20000, retries: 1 }
|
| 89 |
).catch(() => null),
|
|
|
|
| 93 |
{
|
| 94 |
timeoutMs: 30000,
|
| 95 |
retries: 0,
|
| 96 |
+
jsonBody: { symbols: [sym], days_ahead: 7, model: 'ensemble' },
|
| 97 |
}
|
| 98 |
).catch(() => null),
|
| 99 |
]);
|
|
|
|
| 103 |
let techSignal: 'BUY' | 'SELL' | 'HOLD' = 'HOLD'
|
| 104 |
try {
|
| 105 |
const scanResp = await fetchJson<Record<string, unknown>>(
|
| 106 |
+
`/api/scan-signals`,
|
| 107 |
{ method: 'POST' },
|
| 108 |
{
|
| 109 |
timeoutMs: 30000,
|
| 110 |
retries: 0,
|
| 111 |
jsonBody: {
|
| 112 |
symbols: [sym],
|
| 113 |
+
period: '6mo',
|
| 114 |
+
interval: '1d',
|
| 115 |
+
limit: 1,
|
| 116 |
},
|
| 117 |
}
|
| 118 |
)
|
| 119 |
|
| 120 |
+
const items = Array.isArray(scanResp?.data) ? scanResp.data : []
|
|
|
|
|
|
|
| 121 |
const item = items.find((x: Record<string, unknown>) => String(x?.symbol || '').toUpperCase() === sym)
|
| 122 |
+
if (item?.gates && typeof item.gates.required_ok === 'boolean') {
|
| 123 |
+
requiredOk = item.gates.required_ok
|
|
|
|
|
|
|
|
|
|
| 124 |
}
|
| 125 |
+
if (item?.technical_signal) {
|
| 126 |
+
const ts = String(item.technical_signal).toUpperCase().trim()
|
| 127 |
if (ts === 'BUY' || ts === 'SELL') techSignal = ts
|
| 128 |
}
|
| 129 |
} catch (e) {
|
|
|
|
| 202 |
};
|
| 203 |
|
| 204 |
const tabs = [
|
| 205 |
+
{ id: 'market', label: 'Piyasa Özeti', icon: Globe },
|
| 206 |
+
{ id: 'stock', label: 'Hisse Analizi', icon: BarChart3 },
|
| 207 |
];
|
| 208 |
|
| 209 |
return (
|
|
|
|
| 213 |
<div className="mb-6">
|
| 214 |
<div className="flex items-center gap-3 mb-2">
|
| 215 |
<Brain className="w-8 h-8 text-purple-600" />
|
| 216 |
+
<h1 className="text-3xl font-bold text-gray-900">Yapay Zeka Analizleri</h1>
|
| 217 |
</div>
|
| 218 |
<p className="text-gray-600">
|
| 219 |
+
Makine öğrenmesi destekli piyasa özeti ve hisse tahminleri (bilgi amaçlı)
|
| 220 |
</p>
|
| 221 |
</div>
|
| 222 |
|
|
|
|
| 247 |
{activeTab === 'market' && (
|
| 248 |
<div className="space-y-6">
|
| 249 |
<div className="bg-white p-6 rounded-lg shadow-sm">
|
| 250 |
+
<h2 className="text-xl font-bold text-gray-900 mb-4">📈 Piyasa Özeti</h2>
|
| 251 |
<p className="text-gray-600 mb-4">
|
| 252 |
+
Bu bölüm, backend'den gelen gerçek piyasa özetini gösterir.
|
| 253 |
</p>
|
| 254 |
<button
|
| 255 |
onClick={handleMarketAnalysis}
|
|
|
|
| 257 |
className="bg-purple-600 text-white px-6 py-3 rounded-md hover:bg-purple-700 disabled:opacity-50 font-semibold flex items-center gap-2"
|
| 258 |
>
|
| 259 |
<Activity className="w-5 h-5" />
|
| 260 |
+
{loading ? 'Yükleniyor...' : 'Piyasa Verisini Getir'}
|
| 261 |
</button>
|
| 262 |
</div>
|
| 263 |
|
|
|
|
| 266 |
<h3 className="font-semibold text-gray-900 mb-3">Market Overview</h3>
|
| 267 |
<div className="grid grid-cols-1 md:grid-cols-3 gap-4">
|
| 268 |
<div className="p-4 bg-gray-50 rounded-lg">
|
| 269 |
+
<p className="text-sm text-gray-600">Endeks</p>
|
| 270 |
+
<p className="text-xl font-bold text-gray-900">{String(marketOverview.index || 'BIST 100')}</p>
|
| 271 |
</div>
|
| 272 |
<div className="p-4 bg-gray-50 rounded-lg">
|
| 273 |
+
<p className="text-sm text-gray-600">Değer</p>
|
| 274 |
<p className="text-xl font-bold text-gray-900">
|
| 275 |
+
{typeof marketOverview.value === 'number' ? marketOverview.value.toLocaleString('tr-TR', { minimumFractionDigits: 2 }) : '—'}
|
| 276 |
</p>
|
| 277 |
</div>
|
| 278 |
<div className="p-4 bg-gray-50 rounded-lg">
|
| 279 |
+
<p className="text-sm text-gray-600">Günlük Değişim</p>
|
| 280 |
<p className={`text-xl font-bold ${(Number(marketOverview.change_percent) || 0) >= 0 ? 'text-green-600' : 'text-red-600'}`}>
|
| 281 |
{typeof marketOverview.change_percent === 'number' ? `${marketOverview.change_percent >= 0 ? '+' : ''}${marketOverview.change_percent.toFixed(2)}%` : '—'}
|
| 282 |
</p>
|
|
|
|
| 285 |
</div>
|
| 286 |
) : (
|
| 287 |
<div className="bg-white p-6 rounded-lg shadow-sm">
|
| 288 |
+
<p className="text-gray-600">Henüz veri yok.</p>
|
| 289 |
</div>
|
| 290 |
)}
|
| 291 |
</div>
|
|
|
|
| 295 |
{activeTab === 'stock' && (
|
| 296 |
<div className="space-y-6">
|
| 297 |
<div className="bg-white p-6 rounded-lg shadow-sm">
|
| 298 |
+
<h2 className="text-xl font-bold text-gray-900 mb-4">🧠 Hisse Senedi Analizi</h2>
|
| 299 |
<p className="text-gray-600 mb-4">
|
| 300 |
+
Seçtiğiniz hisse senedi için model çıktısını ve özet metrikleri gösterir.
|
| 301 |
</p>
|
| 302 |
<div className="flex gap-4">
|
| 303 |
<div className="flex-1">
|
|
|
|
| 305 |
<Search className="absolute left-3 top-1/2 transform -translate-y-1/2 w-5 h-5 text-gray-400" />
|
| 306 |
<input
|
| 307 |
type="text"
|
| 308 |
+
placeholder="Örn: THYAO, EREGL, AKBNK"
|
| 309 |
value={symbol}
|
| 310 |
onChange={(e) => setSymbol(e.target.value.toUpperCase())}
|
| 311 |
onKeyPress={(e) => e.key === 'Enter' && handleStockAnalyze()}
|
|
|
|
| 319 |
className="bg-purple-600 text-white px-6 py-3 rounded-md hover:bg-purple-700 disabled:opacity-50 disabled:cursor-not-allowed font-semibold flex items-center gap-2"
|
| 320 |
>
|
| 321 |
<Brain className="w-5 h-5" />
|
| 322 |
+
{loading ? 'Analiz Ediliyor...' : 'Analiz Et'}
|
| 323 |
</button>
|
| 324 |
</div>
|
| 325 |
</div>
|
|
|
|
| 335 |
</div>
|
| 336 |
<div className="text-right">
|
| 337 |
<p className="text-3xl font-bold text-gray-900">
|
| 338 |
+
{typeof analysis.stock.current_price === 'number' ? `₺${analysis.stock.current_price.toFixed(2)}` : '—'}
|
| 339 |
</p>
|
| 340 |
<p className={`text-lg font-semibold ${
|
| 341 |
(analysis.stock.change_percent ?? 0) >= 0 ? 'text-green-600' : 'text-red-600'
|
|
|
|
| 348 |
</div>
|
| 349 |
<div className="grid grid-cols-3 gap-4 pt-4 border-t">
|
| 350 |
<div>
|
| 351 |
+
<p className="text-sm text-gray-600">Hacim</p>
|
| 352 |
<p className="font-semibold">{typeof analysis.stock.volume === 'number' ? `${(analysis.stock.volume / 1000000).toFixed(1)}M` : '—'}</p>
|
| 353 |
</div>
|
| 354 |
<div>
|
| 355 |
+
<p className="text-sm text-gray-600">52 Hafta Düşük</p>
|
| 356 |
+
<p className="font-semibold">{typeof analysis.stock.week_52_low === 'number' ? `₺${analysis.stock.week_52_low.toFixed(2)}` : '—'}</p>
|
| 357 |
</div>
|
| 358 |
<div>
|
| 359 |
+
<p className="text-sm text-gray-600">52 Hafta Yüksek</p>
|
| 360 |
+
<p className="font-semibold">{typeof analysis.stock.week_52_high === 'number' ? `₺${analysis.stock.week_52_high.toFixed(2)}` : '—'}</p>
|
| 361 |
</div>
|
| 362 |
</div>
|
| 363 |
</div>
|
|
|
|
| 367 |
<div className="bg-gradient-to-r from-purple-600 to-purple-800 text-white p-6 rounded-lg shadow-lg">
|
| 368 |
<div className="flex items-center gap-2 mb-4">
|
| 369 |
<Zap className="w-6 h-6" />
|
| 370 |
+
<h3 className="text-xl font-bold">ML Tahmini</h3>
|
| 371 |
{analysis.prediction.models_used != null && (
|
| 372 |
<span className="text-xs bg-white/20 px-2 py-1 rounded">
|
| 373 |
Ensemble ({String(analysis.prediction.models_used)} Model)
|
|
|
|
| 376 |
</div>
|
| 377 |
<div className="grid grid-cols-1 md:grid-cols-3 gap-4">
|
| 378 |
<div className="bg-white/10 backdrop-blur p-4 rounded-lg">
|
| 379 |
+
<p className="text-sm opacity-75 mb-1">Sinyal</p>
|
| 380 |
<p className="text-2xl font-bold">{analysis.prediction.recommendation}</p>
|
| 381 |
</div>
|
| 382 |
<div className="bg-white/10 backdrop-blur p-4 rounded-lg">
|
| 383 |
+
<p className="text-sm opacity-75 mb-1">Güven</p>
|
| 384 |
<p className="text-2xl font-bold">{typeof analysis.prediction.confidence === 'number' ? `${analysis.prediction.confidence.toFixed(0)}%` : '—'}</p>
|
| 385 |
</div>
|
| 386 |
<div className="bg-white/10 backdrop-blur p-4 rounded-lg">
|
| 387 |
+
<p className="text-sm opacity-75 mb-1">Tahmini Fiyat</p>
|
| 388 |
<p className="text-2xl font-bold">
|
| 389 |
{analysis.prediction.predicted_price !== undefined && analysis.prediction.predicted_price !== null
|
| 390 |
+
? `₺${Number(analysis.prediction.predicted_price).toFixed(2)}`
|
| 391 |
: '—'}
|
| 392 |
</p>
|
| 393 |
</div>
|
|
|
|
| 395 |
</div>
|
| 396 |
) : (
|
| 397 |
<div className="bg-white p-6 rounded-lg shadow-sm border">
|
| 398 |
+
<p className="text-gray-700">ML tahmini alınamadı (servis hata verdi veya veri yok).</p>
|
| 399 |
</div>
|
| 400 |
)}
|
| 401 |
|
|
|
|
| 404 |
<div className="bg-white p-6 rounded-lg shadow-sm">
|
| 405 |
<h3 className="font-bold text-gray-900 mb-4 flex items-center gap-2">
|
| 406 |
<Target className="w-5 h-5 text-purple-600" />
|
| 407 |
+
ML Model Kalitesi
|
| 408 |
</h3>
|
| 409 |
+
<p className="text-xs text-gray-500 mb-3">Bu değerler teknik gösterge değil, ML modelinin iç doğrulama metrikleridir.</p>
|
| 410 |
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
|
| 411 |
{typeof analysis.prediction.trend_score === 'number' && (
|
| 412 |
<div className="p-4 bg-gray-50 rounded-lg">
|
| 413 |
+
<p className="text-sm text-gray-600">Yön Doğruluğu (Direction Acc.)</p>
|
| 414 |
<div className="flex items-center gap-2 mt-1">
|
| 415 |
<div className="flex-1 bg-gray-200 rounded-full h-2">
|
| 416 |
<div className="bg-purple-600 h-2 rounded-full" style={{ width: `${analysis.prediction.trend_score}%` }}></div>
|
|
|
|
| 440 |
href={`/stocks/${symbol}`}
|
| 441 |
className="flex-1 bg-blue-600 text-white text-center py-3 px-6 rounded-md hover:bg-blue-700 font-semibold"
|
| 442 |
>
|
| 443 |
+
Detaylı Analiz Sayfası
|
| 444 |
</Link>
|
| 445 |
<button
|
| 446 |
onClick={() => setAnalysis(null)}
|
| 447 |
className="px-6 py-3 border border-gray-300 rounded-md hover:bg-gray-50 font-semibold"
|
| 448 |
>
|
| 449 |
+
Yeni Analiz
|
| 450 |
</button>
|
| 451 |
</div>
|
| 452 |
</div>
|
|
|
|
| 455 |
{!analysis && !loading && (
|
| 456 |
<div className="bg-white rounded-lg shadow-sm p-12 text-center">
|
| 457 |
<Brain className="w-16 h-16 text-gray-400 mx-auto mb-4" />
|
| 458 |
+
<h3 className="text-lg font-semibold text-gray-900 mb-2">Yapay Zeka Destekli Analiz</h3>
|
| 459 |
+
<p className="text-gray-600 mb-6">Hisse sembolü girerek ML tahminleri ve AI analizi başlatın</p>
|
| 460 |
<div className="flex flex-wrap justify-center gap-2">
|
| 461 |
{quickSymbols.map((s) => (
|
| 462 |
<button
|
|
|
|
| 469 |
))}
|
| 470 |
</div>
|
| 471 |
{quickSymbols.length === 0 && (
|
| 472 |
+
<p className="text-xs text-gray-400 mt-3">Örnek liste şu an alınamadı.</p>
|
| 473 |
)}
|
| 474 |
</div>
|
| 475 |
)}
|
huggingface-space/nextjs-app/src/app/announcements/page.tsx
CHANGED
|
@@ -4,7 +4,6 @@ import { useEffect, useMemo, useState } from 'react'
|
|
| 4 |
import Link from 'next/link'
|
| 5 |
import { Megaphone, Plus, Trash2, ExternalLink, RefreshCw } from 'lucide-react'
|
| 6 |
import { useAuth } from '@/contexts/AuthContext'
|
| 7 |
-
import { useMarket } from '@/contexts/MarketContext'
|
| 8 |
import { addAnnouncement, deleteAnnouncement, loadAnnouncements, type Announcement } from '@/lib/announcements'
|
| 9 |
|
| 10 |
type KapItem = {
|
|
@@ -20,8 +19,6 @@ type KapItem = {
|
|
| 20 |
|
| 21 |
export default function AnnouncementsPage() {
|
| 22 |
const { user } = useAuth()
|
| 23 |
-
const { market } = useMarket()
|
| 24 |
-
const isUS = market === 'us'
|
| 25 |
const userKey = user?.id
|
| 26 |
|
| 27 |
const [items, setItems] = useState<Announcement[]>([])
|
|
@@ -46,23 +43,22 @@ export default function AnnouncementsPage() {
|
|
| 46 |
const json = await res.json().catch(() => null)
|
| 47 |
if (!json?.ok) {
|
| 48 |
setKapItems([])
|
| 49 |
-
setKapError(
|
| 50 |
return
|
| 51 |
}
|
| 52 |
setKapItems(Array.isArray(json.data) ? json.data : [])
|
| 53 |
} catch {
|
| 54 |
setKapItems([])
|
| 55 |
-
setKapError(
|
| 56 |
} finally {
|
| 57 |
setKapLoading(false)
|
| 58 |
}
|
| 59 |
}
|
| 60 |
|
| 61 |
useEffect(() => {
|
| 62 |
-
|
| 63 |
-
else setKapLoading(false)
|
| 64 |
// eslint-disable-next-line react-hooks/exhaustive-deps
|
| 65 |
-
}, [
|
| 66 |
|
| 67 |
const hasItems = items.length > 0
|
| 68 |
|
|
@@ -92,7 +88,7 @@ export default function AnnouncementsPage() {
|
|
| 92 |
|
| 93 |
const formatDate = (iso: string) => {
|
| 94 |
const d = new Date(iso)
|
| 95 |
-
return d.toLocaleString(
|
| 96 |
}
|
| 97 |
|
| 98 |
return (
|
|
@@ -101,30 +97,28 @@ export default function AnnouncementsPage() {
|
|
| 101 |
<div className="mb-8">
|
| 102 |
<div className="flex items-center gap-3 mb-2">
|
| 103 |
<Megaphone className="w-8 h-8 text-blue-600" />
|
| 104 |
-
<h1 className="text-3xl font-bold text-gray-900">
|
| 105 |
</div>
|
| 106 |
<p className="text-gray-600">
|
| 107 |
-
|
| 108 |
-
? 'Local announcement list (no-toy). KAP integration requires a separate backend.'
|
| 109 |
-
: 'Yerel duyuru listesi (no-toy). KAP entegrasyonu ayrı bir backend gerektirir.'}
|
| 110 |
</p>
|
| 111 |
</div>
|
| 112 |
|
| 113 |
<div className="bg-white rounded-lg shadow p-4 mb-6">
|
| 114 |
-
<h2 className="font-semibold text-gray-900 mb-3">
|
| 115 |
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
|
| 116 |
<div>
|
| 117 |
-
<label htmlFor="ann-title" className="block text-sm font-medium text-gray-700 mb-1">
|
| 118 |
<input
|
| 119 |
id="ann-title"
|
| 120 |
value={title}
|
| 121 |
onChange={(e) => setTitle(e.target.value)}
|
| 122 |
className="w-full px-3 py-2 border border-gray-300 rounded-md focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
| 123 |
-
placeholder=
|
| 124 |
/>
|
| 125 |
</div>
|
| 126 |
<div>
|
| 127 |
-
<label htmlFor="ann-url" className="block text-sm font-medium text-gray-700 mb-1">
|
| 128 |
<input
|
| 129 |
id="ann-url"
|
| 130 |
value={url}
|
|
@@ -134,14 +128,14 @@ export default function AnnouncementsPage() {
|
|
| 134 |
/>
|
| 135 |
</div>
|
| 136 |
<div className="md:col-span-2">
|
| 137 |
-
<label htmlFor="ann-content" className="block text-sm font-medium text-gray-700 mb-1">
|
| 138 |
<textarea
|
| 139 |
id="ann-content"
|
| 140 |
value={content}
|
| 141 |
onChange={(e) => setContent(e.target.value)}
|
| 142 |
className="w-full px-3 py-2 border border-gray-300 rounded-md focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
| 143 |
rows={4}
|
| 144 |
-
placeholder=
|
| 145 |
/>
|
| 146 |
</div>
|
| 147 |
</div>
|
|
@@ -149,7 +143,7 @@ export default function AnnouncementsPage() {
|
|
| 149 |
<div className="flex items-center justify-between mt-4">
|
| 150 |
<label className="inline-flex items-center gap-2 text-sm text-gray-700">
|
| 151 |
<input type="checkbox" checked={important} onChange={(e) => setImportant(e.target.checked)} />
|
| 152 |
-
|
| 153 |
</label>
|
| 154 |
|
| 155 |
<button
|
|
@@ -157,21 +151,21 @@ export default function AnnouncementsPage() {
|
|
| 157 |
className="inline-flex items-center gap-2 px-4 py-2 bg-blue-600 text-white rounded-md hover:bg-blue-700 font-semibold"
|
| 158 |
>
|
| 159 |
<Plus className="w-4 h-4" />
|
| 160 |
-
|
| 161 |
</button>
|
| 162 |
</div>
|
| 163 |
</div>
|
| 164 |
|
| 165 |
<div className="bg-white rounded-lg shadow p-4">
|
| 166 |
<div className="flex items-center justify-between mb-3">
|
| 167 |
-
<h2 className="font-semibold text-gray-900">
|
| 168 |
<Link href="/news" className="text-sm text-blue-600 hover:text-blue-800">
|
| 169 |
-
|
| 170 |
</Link>
|
| 171 |
</div>
|
| 172 |
|
| 173 |
{!hasItems ? (
|
| 174 |
-
<div className="text-center py-10 text-gray-600">
|
| 175 |
) : (
|
| 176 |
<div className="space-y-3">
|
| 177 |
{sorted.map((a) => (
|
|
@@ -193,12 +187,12 @@ export default function AnnouncementsPage() {
|
|
| 193 |
rel="noopener noreferrer"
|
| 194 |
className="inline-flex items-center gap-1 text-sm text-blue-600 hover:text-blue-800 mt-2"
|
| 195 |
>
|
| 196 |
-
|
| 197 |
<ExternalLink className="w-3 h-3" />
|
| 198 |
</a>
|
| 199 |
) : null}
|
| 200 |
</div>
|
| 201 |
-
<button onClick={() => onDelete(a.id)} className="text-red-600 hover:text-red-800" aria-label=
|
| 202 |
<Trash2 className="w-5 h-5" />
|
| 203 |
</button>
|
| 204 |
</div>
|
|
@@ -208,7 +202,6 @@ export default function AnnouncementsPage() {
|
|
| 208 |
)}
|
| 209 |
</div>
|
| 210 |
|
| 211 |
-
{!isUS && (
|
| 212 |
<div className="mt-6 bg-white rounded-lg shadow p-4">
|
| 213 |
<div className="flex items-center justify-between gap-3">
|
| 214 |
<h2 className="font-semibold text-gray-900">KAP</h2>
|
|
@@ -223,7 +216,7 @@ export default function AnnouncementsPage() {
|
|
| 223 |
</div>
|
| 224 |
|
| 225 |
<p className="text-sm text-gray-600 mt-1">
|
| 226 |
-
KAP
|
| 227 |
</p>
|
| 228 |
|
| 229 |
{kapError ? <div className="mt-3 text-sm text-amber-900">{kapError}</div> : null}
|
|
@@ -255,7 +248,6 @@ export default function AnnouncementsPage() {
|
|
| 255 |
</div>
|
| 256 |
)}
|
| 257 |
</div>
|
| 258 |
-
)}
|
| 259 |
</div>
|
| 260 |
</div>
|
| 261 |
)
|
|
|
|
| 4 |
import Link from 'next/link'
|
| 5 |
import { Megaphone, Plus, Trash2, ExternalLink, RefreshCw } from 'lucide-react'
|
| 6 |
import { useAuth } from '@/contexts/AuthContext'
|
|
|
|
| 7 |
import { addAnnouncement, deleteAnnouncement, loadAnnouncements, type Announcement } from '@/lib/announcements'
|
| 8 |
|
| 9 |
type KapItem = {
|
|
|
|
| 19 |
|
| 20 |
export default function AnnouncementsPage() {
|
| 21 |
const { user } = useAuth()
|
|
|
|
|
|
|
| 22 |
const userKey = user?.id
|
| 23 |
|
| 24 |
const [items, setItems] = useState<Announcement[]>([])
|
|
|
|
| 43 |
const json = await res.json().catch(() => null)
|
| 44 |
if (!json?.ok) {
|
| 45 |
setKapItems([])
|
| 46 |
+
setKapError('KAP verisi şu an alınamadı (best-effort).')
|
| 47 |
return
|
| 48 |
}
|
| 49 |
setKapItems(Array.isArray(json.data) ? json.data : [])
|
| 50 |
} catch {
|
| 51 |
setKapItems([])
|
| 52 |
+
setKapError('KAP verisi şu an alınamadı (best-effort).')
|
| 53 |
} finally {
|
| 54 |
setKapLoading(false)
|
| 55 |
}
|
| 56 |
}
|
| 57 |
|
| 58 |
useEffect(() => {
|
| 59 |
+
fetchKap()
|
|
|
|
| 60 |
// eslint-disable-next-line react-hooks/exhaustive-deps
|
| 61 |
+
}, [])
|
| 62 |
|
| 63 |
const hasItems = items.length > 0
|
| 64 |
|
|
|
|
| 88 |
|
| 89 |
const formatDate = (iso: string) => {
|
| 90 |
const d = new Date(iso)
|
| 91 |
+
return d.toLocaleString('tr-TR', { year: 'numeric', month: 'short', day: 'numeric', hour: '2-digit', minute: '2-digit' })
|
| 92 |
}
|
| 93 |
|
| 94 |
return (
|
|
|
|
| 97 |
<div className="mb-8">
|
| 98 |
<div className="flex items-center gap-3 mb-2">
|
| 99 |
<Megaphone className="w-8 h-8 text-blue-600" />
|
| 100 |
+
<h1 className="text-3xl font-bold text-gray-900">Duyurular</h1>
|
| 101 |
</div>
|
| 102 |
<p className="text-gray-600">
|
| 103 |
+
Yerel duyuru listesi (no-toy). KAP entegrasyonu ayrı bir backend gerektirir.
|
|
|
|
|
|
|
| 104 |
</p>
|
| 105 |
</div>
|
| 106 |
|
| 107 |
<div className="bg-white rounded-lg shadow p-4 mb-6">
|
| 108 |
+
<h2 className="font-semibold text-gray-900 mb-3">Duyuru Ekle</h2>
|
| 109 |
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
|
| 110 |
<div>
|
| 111 |
+
<label htmlFor="ann-title" className="block text-sm font-medium text-gray-700 mb-1">Başlık</label>
|
| 112 |
<input
|
| 113 |
id="ann-title"
|
| 114 |
value={title}
|
| 115 |
onChange={(e) => setTitle(e.target.value)}
|
| 116 |
className="w-full px-3 py-2 border border-gray-300 rounded-md focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
| 117 |
+
placeholder="Örn: Veri kaynağı güncellendi"
|
| 118 |
/>
|
| 119 |
</div>
|
| 120 |
<div>
|
| 121 |
+
<label htmlFor="ann-url" className="block text-sm font-medium text-gray-700 mb-1">Link (opsiyonel)</label>
|
| 122 |
<input
|
| 123 |
id="ann-url"
|
| 124 |
value={url}
|
|
|
|
| 128 |
/>
|
| 129 |
</div>
|
| 130 |
<div className="md:col-span-2">
|
| 131 |
+
<label htmlFor="ann-content" className="block text-sm font-medium text-gray-700 mb-1">İçerik</label>
|
| 132 |
<textarea
|
| 133 |
id="ann-content"
|
| 134 |
value={content}
|
| 135 |
onChange={(e) => setContent(e.target.value)}
|
| 136 |
className="w-full px-3 py-2 border border-gray-300 rounded-md focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
| 137 |
rows={4}
|
| 138 |
+
placeholder="Duyuru detayları..."
|
| 139 |
/>
|
| 140 |
</div>
|
| 141 |
</div>
|
|
|
|
| 143 |
<div className="flex items-center justify-between mt-4">
|
| 144 |
<label className="inline-flex items-center gap-2 text-sm text-gray-700">
|
| 145 |
<input type="checkbox" checked={important} onChange={(e) => setImportant(e.target.checked)} />
|
| 146 |
+
Önemli
|
| 147 |
</label>
|
| 148 |
|
| 149 |
<button
|
|
|
|
| 151 |
className="inline-flex items-center gap-2 px-4 py-2 bg-blue-600 text-white rounded-md hover:bg-blue-700 font-semibold"
|
| 152 |
>
|
| 153 |
<Plus className="w-4 h-4" />
|
| 154 |
+
Ekle
|
| 155 |
</button>
|
| 156 |
</div>
|
| 157 |
</div>
|
| 158 |
|
| 159 |
<div className="bg-white rounded-lg shadow p-4">
|
| 160 |
<div className="flex items-center justify-between mb-3">
|
| 161 |
+
<h2 className="font-semibold text-gray-900">Duyuru Listesi</h2>
|
| 162 |
<Link href="/news" className="text-sm text-blue-600 hover:text-blue-800">
|
| 163 |
+
Haberler →
|
| 164 |
</Link>
|
| 165 |
</div>
|
| 166 |
|
| 167 |
{!hasItems ? (
|
| 168 |
+
<div className="text-center py-10 text-gray-600">Henüz duyuru eklenmedi.</div>
|
| 169 |
) : (
|
| 170 |
<div className="space-y-3">
|
| 171 |
{sorted.map((a) => (
|
|
|
|
| 187 |
rel="noopener noreferrer"
|
| 188 |
className="inline-flex items-center gap-1 text-sm text-blue-600 hover:text-blue-800 mt-2"
|
| 189 |
>
|
| 190 |
+
Kaynak
|
| 191 |
<ExternalLink className="w-3 h-3" />
|
| 192 |
</a>
|
| 193 |
) : null}
|
| 194 |
</div>
|
| 195 |
+
<button onClick={() => onDelete(a.id)} className="text-red-600 hover:text-red-800" aria-label="Duyuruyu sil">
|
| 196 |
<Trash2 className="w-5 h-5" />
|
| 197 |
</button>
|
| 198 |
</div>
|
|
|
|
| 202 |
)}
|
| 203 |
</div>
|
| 204 |
|
|
|
|
| 205 |
<div className="mt-6 bg-white rounded-lg shadow p-4">
|
| 206 |
<div className="flex items-center justify-between gap-3">
|
| 207 |
<h2 className="font-semibold text-gray-900">KAP</h2>
|
|
|
|
| 216 |
</div>
|
| 217 |
|
| 218 |
<p className="text-sm text-gray-600 mt-1">
|
| 219 |
+
KAP “light disclosure list” üzerinden (anahtarsız) best-effort çekilir. Sembol bilgisi her zaman gelmeyebilir.
|
| 220 |
</p>
|
| 221 |
|
| 222 |
{kapError ? <div className="mt-3 text-sm text-amber-900">{kapError}</div> : null}
|
|
|
|
| 248 |
</div>
|
| 249 |
)}
|
| 250 |
</div>
|
|
|
|
| 251 |
</div>
|
| 252 |
</div>
|
| 253 |
)
|
huggingface-space/nextjs-app/src/app/api/eligible/route.ts
CHANGED
|
@@ -10,22 +10,6 @@ export const dynamic = 'force-dynamic'
|
|
| 10 |
*/
|
| 11 |
const isProduction = !!API_BASE
|
| 12 |
|
| 13 |
-
type MarketId = 'bist' | 'us'
|
| 14 |
-
|
| 15 |
-
function parseMarket(value: string | null): MarketId {
|
| 16 |
-
return value === 'us' ? 'us' : 'bist'
|
| 17 |
-
}
|
| 18 |
-
|
| 19 |
-
function resolveProjectRoot(join: (...parts: string[]) => string): string {
|
| 20 |
-
const cwd = process.cwd()
|
| 21 |
-
return cwd.endsWith('/nextjs-app') ? join(cwd, '..') : cwd
|
| 22 |
-
}
|
| 23 |
-
|
| 24 |
-
function timestampOf(value: unknown): number {
|
| 25 |
-
const timestamp = Date.parse(String(value || ''))
|
| 26 |
-
return Number.isFinite(timestamp) ? timestamp : 0
|
| 27 |
-
}
|
| 28 |
-
|
| 29 |
interface ScanStage2 {
|
| 30 |
eligible: boolean
|
| 31 |
sharpe: number
|
|
@@ -47,8 +31,6 @@ interface ScanResults {
|
|
| 47 |
export async function GET(request: Request) {
|
| 48 |
const auth = await requireAuth(request)
|
| 49 |
if (!auth.authenticated) return auth.response
|
| 50 |
-
const { searchParams } = new URL(request.url)
|
| 51 |
-
const market = parseMarket(searchParams.get('market'))
|
| 52 |
|
| 53 |
// Production: proxy to HuggingFace Space
|
| 54 |
if (isProduction) {
|
|
@@ -57,7 +39,7 @@ export async function GET(request: Request) {
|
|
| 57 |
const timeout = setTimeout(() => controller.abort(), 30000)
|
| 58 |
|
| 59 |
try {
|
| 60 |
-
const resp = await fetch(apiUrl(
|
| 61 |
headers: { accept: 'application/json' },
|
| 62 |
signal: controller.signal,
|
| 63 |
})
|
|
@@ -88,151 +70,37 @@ export async function GET(request: Request) {
|
|
| 88 |
const { join } = require('path')
|
| 89 |
/* eslint-enable */
|
| 90 |
|
| 91 |
-
const
|
| 92 |
-
|
| 93 |
-
const marketDir = market === 'bist' ? paperDir : join(paperDir, 'markets', market)
|
| 94 |
-
const finalFile = market === 'bist' ? join(paperDir, 'bist100_scan_results.json') : join(marketDir, 'scan_results.json')
|
| 95 |
-
const workFile = market === 'bist' ? join(paperDir, 'bist100_scan_results_work.json') : join(marketDir, 'scan_results_work.json')
|
| 96 |
-
|
| 97 |
-
let finalRaw: string | null = null
|
| 98 |
-
let workRaw: string | null = null
|
| 99 |
|
| 100 |
try {
|
| 101 |
-
|
| 102 |
} catch {
|
| 103 |
-
|
| 104 |
-
|
| 105 |
-
|
| 106 |
-
|
| 107 |
-
|
| 108 |
-
|
| 109 |
-
|
| 110 |
-
|
| 111 |
-
|
| 112 |
-
if (!finalRaw && !workRaw) {
|
| 113 |
-
return NextResponse.json(
|
| 114 |
-
{
|
| 115 |
-
ok: false,
|
| 116 |
-
error: `Scan results not found. Run ${market.toUpperCase()} scan first.`,
|
| 117 |
-
scanRunning: false,
|
| 118 |
-
scanProgress: '',
|
| 119 |
-
scanCompleted: false,
|
| 120 |
-
universe: null,
|
| 121 |
-
market,
|
| 122 |
-
totalStocks: null,
|
| 123 |
-
stage1Done: null,
|
| 124 |
-
stage2Done: null,
|
| 125 |
-
updatedAt: null,
|
| 126 |
-
eligible: [],
|
| 127 |
-
excluded: [],
|
| 128 |
-
stage1Failures: [],
|
| 129 |
-
stage1PassedCount: 0,
|
| 130 |
-
summary: {
|
| 131 |
-
eligibleCount: 0,
|
| 132 |
-
excludedCount: 0,
|
| 133 |
-
stage1FailedCount: 0,
|
| 134 |
-
avgSharpe: 0,
|
| 135 |
-
avgReturn: 0,
|
| 136 |
-
avgHitRate: 0,
|
| 137 |
-
},
|
| 138 |
-
},
|
| 139 |
-
{ status: 404 }
|
| 140 |
-
)
|
| 141 |
-
}
|
| 142 |
-
|
| 143 |
-
const finalData = finalRaw ? JSON.parse(finalRaw) as Record<string, unknown> : null
|
| 144 |
-
const workData = workRaw ? JSON.parse(workRaw) as Record<string, unknown> : null
|
| 145 |
-
const finalTimestamp = Math.max(
|
| 146 |
-
timestampOf(finalData?.updated_at),
|
| 147 |
-
timestampOf(finalData?.scan_finished),
|
| 148 |
-
timestampOf(finalData?.scan_started),
|
| 149 |
-
timestampOf(finalData?.timestamp),
|
| 150 |
-
)
|
| 151 |
-
const workTimestamp = Math.max(
|
| 152 |
-
timestampOf(workData?.updated_at),
|
| 153 |
-
timestampOf(workData?.scan_finished),
|
| 154 |
-
timestampOf(workData?.scan_started),
|
| 155 |
-
timestampOf(workData?.timestamp),
|
| 156 |
-
)
|
| 157 |
-
const usingWorkFile = Boolean(workData) && (!finalData || !workData?.completed || workTimestamp >= finalTimestamp)
|
| 158 |
-
const rawData = (usingWorkFile ? workData : finalData) as Record<string, unknown> | null
|
| 159 |
-
|
| 160 |
-
if (!rawData) {
|
| 161 |
-
throw new Error('Eligible scan payload is empty')
|
| 162 |
-
}
|
| 163 |
-
|
| 164 |
-
const stage1 = (rawData.stage1 ?? {}) as Record<string, Record<string, unknown>>
|
| 165 |
-
const stage2 = (rawData.stage2 ?? {}) as Record<string, Record<string, unknown>>
|
| 166 |
-
const completed = Boolean(rawData.completed && !usingWorkFile)
|
| 167 |
-
|
| 168 |
-
const stage1Failures: Array<{ symbol: string; reason: string }> = []
|
| 169 |
-
let stage1PassedCount = 0
|
| 170 |
-
let stage1Done = 0
|
| 171 |
-
|
| 172 |
-
for (const [sym, info] of Object.entries(stage1)) {
|
| 173 |
-
stage1Done += 1
|
| 174 |
-
if (info?.passed) {
|
| 175 |
-
stage1PassedCount += 1
|
| 176 |
-
} else {
|
| 177 |
-
stage1Failures.push({ symbol: sym, reason: String(info?.reason || 'FAIL') })
|
| 178 |
}
|
| 179 |
}
|
| 180 |
|
| 181 |
-
|
| 182 |
-
|
| 183 |
-
ok: false,
|
| 184 |
-
error: `Scan results not ready yet. Start/finish the ${market.toUpperCase()} scan first.`,
|
| 185 |
-
scanRunning: true,
|
| 186 |
-
scanProgress: stage1Done > 0
|
| 187 |
-
? `Stage 1: ${stage1PassedCount}/${stage1Done || rawData.total_stocks || '?'} passed`
|
| 188 |
-
: `${market.toUpperCase()} scan is starting...`,
|
| 189 |
-
scanCompleted: false,
|
| 190 |
-
universe: rawData.universe ?? null,
|
| 191 |
-
market,
|
| 192 |
-
totalStocks: rawData.total_stocks ?? null,
|
| 193 |
-
stage1Done,
|
| 194 |
-
stage2Done: 0,
|
| 195 |
-
updatedAt: rawData.updated_at ?? null,
|
| 196 |
-
eligible: [],
|
| 197 |
-
excluded: [],
|
| 198 |
-
stage1Failures,
|
| 199 |
-
stage1PassedCount,
|
| 200 |
-
summary: {
|
| 201 |
-
eligibleCount: 0,
|
| 202 |
-
excludedCount: 0,
|
| 203 |
-
stage1FailedCount: stage1Failures.length,
|
| 204 |
-
avgSharpe: 0,
|
| 205 |
-
avgReturn: 0,
|
| 206 |
-
avgHitRate: 0,
|
| 207 |
-
},
|
| 208 |
-
timestamp: rawData.scan_started ?? rawData.updated_at ?? null,
|
| 209 |
-
})
|
| 210 |
-
}
|
| 211 |
|
| 212 |
const eligible: Array<ScanStage2 & { symbol: string }> = []
|
| 213 |
const excluded: Array<{ symbol: string; reason: string; sharpe?: number }> = []
|
| 214 |
|
| 215 |
-
for (const [sym, info] of Object.entries(
|
| 216 |
-
const rawReturn = Number(info.annual_return ?? info.total_return_pct ?? 0)
|
| 217 |
-
const rawHitRate = Number(info.hit_rate ?? 0)
|
| 218 |
-
const normalizedReturn = Math.abs(rawReturn) > 1.5 ? rawReturn / 100 : rawReturn
|
| 219 |
-
const normalizedHitRate = rawHitRate > 1 ? rawHitRate / 100 : rawHitRate
|
| 220 |
-
|
| 221 |
if (info.eligible) {
|
| 222 |
-
eligible.push({
|
| 223 |
-
symbol: sym,
|
| 224 |
-
eligible: true,
|
| 225 |
-
sharpe: Number(info.sharpe ?? 0),
|
| 226 |
-
annual_return: normalizedReturn,
|
| 227 |
-
hit_rate: normalizedHitRate,
|
| 228 |
-
quality: String(info.quality || ''),
|
| 229 |
-
reason: typeof info.reason === 'string' ? info.reason : undefined,
|
| 230 |
-
})
|
| 231 |
} else {
|
| 232 |
excluded.push({
|
| 233 |
symbol: sym,
|
| 234 |
-
reason:
|
| 235 |
-
sharpe:
|
| 236 |
})
|
| 237 |
}
|
| 238 |
}
|
|
@@ -240,6 +108,11 @@ export async function GET(request: Request) {
|
|
| 240 |
eligible.sort((a, b) => (b.sharpe || 0) - (a.sharpe || 0))
|
| 241 |
excluded.sort((a, b) => a.symbol.localeCompare(b.symbol))
|
| 242 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 243 |
const avgSharpe = eligible.length
|
| 244 |
? eligible.reduce((sum, e) => sum + (e.sharpe || 0), 0) / eligible.length
|
| 245 |
: 0
|
|
@@ -255,18 +128,7 @@ export async function GET(request: Request) {
|
|
| 255 |
eligible,
|
| 256 |
excluded,
|
| 257 |
stage1Failures,
|
| 258 |
-
|
| 259 |
-
scanProgress: usingWorkFile
|
| 260 |
-
? `Stage 2: ${Object.keys(stage2).length}/${stage1PassedCount || '?'} stocks processed`
|
| 261 |
-
: '',
|
| 262 |
-
scanCompleted: completed,
|
| 263 |
-
universe: rawData.universe ?? null,
|
| 264 |
-
market,
|
| 265 |
-
totalStocks: rawData.total_stocks ?? null,
|
| 266 |
-
stage1Done,
|
| 267 |
-
stage2Done: Object.keys(stage2).length,
|
| 268 |
-
updatedAt: rawData.updated_at ?? null,
|
| 269 |
-
stage1PassedCount,
|
| 270 |
summary: {
|
| 271 |
eligibleCount: eligible.length,
|
| 272 |
excludedCount: excluded.length,
|
|
@@ -275,7 +137,7 @@ export async function GET(request: Request) {
|
|
| 275 |
avgReturn: Number(avgReturn.toFixed(2)),
|
| 276 |
avgHitRate: Number(avgHitRate.toFixed(2)),
|
| 277 |
},
|
| 278 |
-
timestamp:
|
| 279 |
})
|
| 280 |
} catch (e: unknown) {
|
| 281 |
return NextResponse.json(
|
|
|
|
| 10 |
*/
|
| 11 |
const isProduction = !!API_BASE
|
| 12 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13 |
interface ScanStage2 {
|
| 14 |
eligible: boolean
|
| 15 |
sharpe: number
|
|
|
|
| 31 |
export async function GET(request: Request) {
|
| 32 |
const auth = await requireAuth(request)
|
| 33 |
if (!auth.authenticated) return auth.response
|
|
|
|
|
|
|
| 34 |
|
| 35 |
// Production: proxy to HuggingFace Space
|
| 36 |
if (isProduction) {
|
|
|
|
| 39 |
const timeout = setTimeout(() => controller.abort(), 30000)
|
| 40 |
|
| 41 |
try {
|
| 42 |
+
const resp = await fetch(apiUrl('/api/eligible'), {
|
| 43 |
headers: { accept: 'application/json' },
|
| 44 |
signal: controller.signal,
|
| 45 |
})
|
|
|
|
| 70 |
const { join } = require('path')
|
| 71 |
/* eslint-enable */
|
| 72 |
|
| 73 |
+
const filePath = join(process.cwd(), '..', 'paper_trading', 'bist100_scan_results.json')
|
| 74 |
+
let raw: string
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 75 |
|
| 76 |
try {
|
| 77 |
+
raw = await readFile(filePath, 'utf-8')
|
| 78 |
} catch {
|
| 79 |
+
const altPath = join(process.cwd(), 'paper_trading', 'bist100_scan_results.json')
|
| 80 |
+
try {
|
| 81 |
+
raw = await readFile(altPath, 'utf-8')
|
| 82 |
+
} catch {
|
| 83 |
+
return NextResponse.json(
|
| 84 |
+
{ ok: false, error: 'Scan results not found. Run BIST100 scan first.' },
|
| 85 |
+
{ status: 404 }
|
| 86 |
+
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 87 |
}
|
| 88 |
}
|
| 89 |
|
| 90 |
+
const data: ScanResults = JSON.parse(raw)
|
| 91 |
+
const s2 = data.stage2 || {}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 92 |
|
| 93 |
const eligible: Array<ScanStage2 & { symbol: string }> = []
|
| 94 |
const excluded: Array<{ symbol: string; reason: string; sharpe?: number }> = []
|
| 95 |
|
| 96 |
+
for (const [sym, info] of Object.entries(s2)) {
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 97 |
if (info.eligible) {
|
| 98 |
+
eligible.push({ symbol: sym, ...info })
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 99 |
} else {
|
| 100 |
excluded.push({
|
| 101 |
symbol: sym,
|
| 102 |
+
reason: info.reason || info.quality || 'Yetersiz kalite',
|
| 103 |
+
sharpe: info.sharpe,
|
| 104 |
})
|
| 105 |
}
|
| 106 |
}
|
|
|
|
| 108 |
eligible.sort((a, b) => (b.sharpe || 0) - (a.sharpe || 0))
|
| 109 |
excluded.sort((a, b) => a.symbol.localeCompare(b.symbol))
|
| 110 |
|
| 111 |
+
const stage1Failures = Object.entries(data.stage1?.failed || {}).map(([sym, reason]) => ({
|
| 112 |
+
symbol: sym,
|
| 113 |
+
reason,
|
| 114 |
+
}))
|
| 115 |
+
|
| 116 |
const avgSharpe = eligible.length
|
| 117 |
? eligible.reduce((sum, e) => sum + (e.sharpe || 0), 0) / eligible.length
|
| 118 |
: 0
|
|
|
|
| 128 |
eligible,
|
| 129 |
excluded,
|
| 130 |
stage1Failures,
|
| 131 |
+
stage1PassedCount: data.stage1?.passed?.length || 0,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 132 |
summary: {
|
| 133 |
eligibleCount: eligible.length,
|
| 134 |
excludedCount: excluded.length,
|
|
|
|
| 137 |
avgReturn: Number(avgReturn.toFixed(2)),
|
| 138 |
avgHitRate: Number(avgHitRate.toFixed(2)),
|
| 139 |
},
|
| 140 |
+
timestamp: data.timestamp,
|
| 141 |
})
|
| 142 |
} catch (e: unknown) {
|
| 143 |
return NextResponse.json(
|
huggingface-space/nextjs-app/src/app/api/health/route.ts
CHANGED
|
@@ -17,8 +17,8 @@ export async function GET() {
|
|
| 17 |
const backendBase = API_BASE;
|
| 18 |
if (!backendBase) {
|
| 19 |
return NextResponse.json(
|
| 20 |
-
{ ok:
|
| 21 |
-
{ status:
|
| 22 |
);
|
| 23 |
}
|
| 24 |
|
|
|
|
| 17 |
const backendBase = API_BASE;
|
| 18 |
if (!backendBase) {
|
| 19 |
return NextResponse.json(
|
| 20 |
+
{ ok: false, error: 'Backend bağlantısı yapılandırılmamış.' },
|
| 21 |
+
{ status: 500, headers: { 'Cache-Control': 'no-store' } }
|
| 22 |
);
|
| 23 |
}
|
| 24 |
|
huggingface-space/nextjs-app/src/app/api/ml-predictions/route.ts
CHANGED
|
@@ -1,64 +1,21 @@
|
|
| 1 |
import { NextRequest, NextResponse } from 'next/server'
|
| 2 |
-
import { spawn } from 'child_process'
|
| 3 |
-
import { resolve } from 'path'
|
| 4 |
|
| 5 |
import { API_BASE } from '@/lib/runtime-config'
|
| 6 |
import { requireAuth } from '@/lib/api-auth'
|
| 7 |
|
| 8 |
export const dynamic = 'force-dynamic'
|
| 9 |
|
| 10 |
-
type MarketId = 'bist' | 'us'
|
| 11 |
-
|
| 12 |
-
function parseMarket(value: unknown): MarketId {
|
| 13 |
-
return String(value || '').trim().toLowerCase() === 'us' ? 'us' : 'bist'
|
| 14 |
-
}
|
| 15 |
-
|
| 16 |
-
function resolveProjectRoot() {
|
| 17 |
-
const cwd = process.cwd()
|
| 18 |
-
return cwd.endsWith('/nextjs-app') ? resolve(cwd, '..') : cwd
|
| 19 |
-
}
|
| 20 |
-
|
| 21 |
-
function runLocalPredictions(symbols: string[], daysAhead: number, model: string, market: MarketId) {
|
| 22 |
-
const projectRoot = resolveProjectRoot()
|
| 23 |
-
const script = [
|
| 24 |
-
'import json, math',
|
| 25 |
-
'from ai.predictions_api import predict_multiple_stocks_for_api',
|
| 26 |
-
'def clean(value):',
|
| 27 |
-
' if isinstance(value, dict):',
|
| 28 |
-
' return {k: clean(v) for k, v in value.items()}',
|
| 29 |
-
' if isinstance(value, list):',
|
| 30 |
-
' return [clean(v) for v in value]',
|
| 31 |
-
' if isinstance(value, float) and (math.isnan(value) or math.isinf(value)):',
|
| 32 |
-
' return None',
|
| 33 |
-
' return value',
|
| 34 |
-
`payload = predict_multiple_stocks_for_api(${JSON.stringify(symbols)}, days_ahead=${daysAhead}, model_type=${JSON.stringify(model)}, market_id=${JSON.stringify(market)})`,
|
| 35 |
-
`print("__JSON__" + json.dumps({"model": ${JSON.stringify(model)}, "days_ahead": ${daysAhead}, "market": ${JSON.stringify(market)}, "predictions": clean(payload)}, allow_nan=False))`,
|
| 36 |
-
].join('\n')
|
| 37 |
-
|
| 38 |
-
return new Promise<{ code: number; stdout: string; stderr: string }>((resolveResult) => {
|
| 39 |
-
const proc = spawn('python3', ['-c', script], { cwd: projectRoot, env: { ...process.env } })
|
| 40 |
-
let stdout = ''
|
| 41 |
-
let stderr = ''
|
| 42 |
-
|
| 43 |
-
proc.stdout.on('data', (chunk) => {
|
| 44 |
-
stdout += chunk.toString()
|
| 45 |
-
})
|
| 46 |
-
proc.stderr.on('data', (chunk) => {
|
| 47 |
-
stderr += chunk.toString()
|
| 48 |
-
})
|
| 49 |
-
proc.on('close', (code) => {
|
| 50 |
-
resolveResult({ code: code ?? 1, stdout, stderr })
|
| 51 |
-
})
|
| 52 |
-
proc.on('error', (error) => {
|
| 53 |
-
resolveResult({ code: 1, stdout, stderr: error.message })
|
| 54 |
-
})
|
| 55 |
-
})
|
| 56 |
-
}
|
| 57 |
-
|
| 58 |
export async function POST(req: NextRequest) {
|
| 59 |
const auth = await requireAuth(req)
|
| 60 |
if (!auth.authenticated) return auth.response
|
| 61 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 62 |
let body: Record<string, unknown> | null = null
|
| 63 |
try {
|
| 64 |
body = await req.json()
|
|
@@ -92,40 +49,6 @@ export async function POST(req: NextRequest) {
|
|
| 92 |
}
|
| 93 |
|
| 94 |
body.symbols = validSymbols
|
| 95 |
-
const market = parseMarket(body.market)
|
| 96 |
-
const model = typeof body.model === 'string' && body.model.trim() ? body.model.trim() : 'ensemble'
|
| 97 |
-
const daysAhead = Number.isFinite(Number(body.days_ahead)) ? Math.max(1, Math.min(30, Number(body.days_ahead))) : 7
|
| 98 |
-
body.market = market
|
| 99 |
-
body.model = model
|
| 100 |
-
body.days_ahead = daysAhead
|
| 101 |
-
|
| 102 |
-
if (!API_BASE) {
|
| 103 |
-
const local = await runLocalPredictions(validSymbols, daysAhead, model, market)
|
| 104 |
-
if (local.code !== 0) {
|
| 105 |
-
return NextResponse.json({ ok: false, error: local.stderr || 'Local ml-predictions failed' }, { status: 500 })
|
| 106 |
-
}
|
| 107 |
-
|
| 108 |
-
const payloadLine = local.stdout
|
| 109 |
-
.split('\n')
|
| 110 |
-
.map((line) => line.trim())
|
| 111 |
-
.find((line) => line.startsWith('__JSON__'))
|
| 112 |
-
|
| 113 |
-
if (!payloadLine) {
|
| 114 |
-
return NextResponse.json(
|
| 115 |
-
{ ok: false, error: 'Local ml-predictions returned no JSON payload', detail: local.stdout || local.stderr },
|
| 116 |
-
{ status: 500 }
|
| 117 |
-
)
|
| 118 |
-
}
|
| 119 |
-
|
| 120 |
-
try {
|
| 121 |
-
return NextResponse.json(JSON.parse(payloadLine.slice('__JSON__'.length)), { status: 200 })
|
| 122 |
-
} catch (error: unknown) {
|
| 123 |
-
return NextResponse.json(
|
| 124 |
-
{ ok: false, error: error instanceof Error ? error.message : 'Invalid local ml-predictions payload', detail: payloadLine },
|
| 125 |
-
{ status: 500 }
|
| 126 |
-
)
|
| 127 |
-
}
|
| 128 |
-
}
|
| 129 |
|
| 130 |
try {
|
| 131 |
// Longer timeout for batch predictions (10+ stocks can take time)
|
|
|
|
| 1 |
import { NextRequest, NextResponse } from 'next/server'
|
|
|
|
|
|
|
| 2 |
|
| 3 |
import { API_BASE } from '@/lib/runtime-config'
|
| 4 |
import { requireAuth } from '@/lib/api-auth'
|
| 5 |
|
| 6 |
export const dynamic = 'force-dynamic'
|
| 7 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 8 |
export async function POST(req: NextRequest) {
|
| 9 |
const auth = await requireAuth(req)
|
| 10 |
if (!auth.authenticated) return auth.response
|
| 11 |
|
| 12 |
+
if (!API_BASE) {
|
| 13 |
+
return NextResponse.json(
|
| 14 |
+
{ ok: false, error: 'API URL yapılandırılmamış. NEXT_PUBLIC_API_URL tanımlayın.' },
|
| 15 |
+
{ status: 500 }
|
| 16 |
+
)
|
| 17 |
+
}
|
| 18 |
+
|
| 19 |
let body: Record<string, unknown> | null = null
|
| 20 |
try {
|
| 21 |
body = await req.json()
|
|
|
|
| 49 |
}
|
| 50 |
|
| 51 |
body.symbols = validSymbols
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 52 |
|
| 53 |
try {
|
| 54 |
// Longer timeout for batch predictions (10+ stocks can take time)
|
huggingface-space/nextjs-app/src/app/api/trading/route.ts
CHANGED
|
@@ -119,45 +119,28 @@ interface EquityPoint { date: string; equity: number; drawdown?: number; cash?:
|
|
| 119 |
interface SignalRecord { symbol: string; signal: string; price?: number; date?: string; strategy?: string; reason?: string; ml_signal?: string; tech_signal?: string; confidence?: number; predicted_return?: number; action_taken?: string }
|
| 120 |
interface SymbolStat { trades: number; wins: number; pnl: number }
|
| 121 |
interface BrokerPosition { qty?: number; avg_cost?: number; entry_date?: string }
|
| 122 |
-
const SUPPORTED_MARKETS = ['bist', 'us'] as const
|
| 123 |
-
type MarketId = (typeof SUPPORTED_MARKETS)[number]
|
| 124 |
|
| 125 |
-
function
|
| 126 |
-
const cwd = process.cwd()
|
| 127 |
-
return cwd.endsWith('/nextjs-app') ? resolvePath(cwd, '..') : cwd
|
| 128 |
-
}
|
| 129 |
-
|
| 130 |
-
function timestampOf(value: unknown) {
|
| 131 |
-
const timestamp = Date.parse(String(value || ''))
|
| 132 |
-
return Number.isFinite(timestamp) ? timestamp : 0
|
| 133 |
-
}
|
| 134 |
-
|
| 135 |
-
function getLocalPaths(marketId: MarketId = 'bist') {
|
| 136 |
const { join, resolve } = require('path')
|
| 137 |
-
const PROJECT_ROOT =
|
| 138 |
const PAPER_DIR = join(PROJECT_ROOT, 'paper_trading')
|
| 139 |
-
const MARKET_DIR = marketId === 'bist' ? PAPER_DIR : join(PAPER_DIR, 'markets', marketId)
|
| 140 |
-
const AUTO_TRADER_DIR = marketId === 'bist' ? join(PAPER_DIR, 'auto_trader') : join(MARKET_DIR, 'auto_trader')
|
| 141 |
return {
|
| 142 |
PROJECT_ROOT,
|
| 143 |
PAPER_DIR,
|
| 144 |
-
|
| 145 |
-
|
| 146 |
-
|
| 147 |
-
|
| 148 |
-
|
| 149 |
-
|
| 150 |
-
|
| 151 |
-
|
| 152 |
-
SCAN_FILE: marketId === 'bist' ? join(PAPER_DIR, 'bist100_scan_results.json') : join(MARKET_DIR, 'scan_results.json'),
|
| 153 |
-
SCAN_WORK_FILE: marketId === 'bist' ? join(PAPER_DIR, 'bist100_scan_results_work.json') : join(MARKET_DIR, 'scan_results_work.json'),
|
| 154 |
-
PID_FILE: join(AUTO_TRADER_DIR, 'worker.pid'),
|
| 155 |
}
|
| 156 |
}
|
| 157 |
|
| 158 |
-
async function localIsWorkerRunning(
|
| 159 |
const { readFile } = getLocalModules()
|
| 160 |
-
const { PID_FILE } = getLocalPaths(
|
| 161 |
try {
|
| 162 |
const pidStr = await readFile(PID_FILE, 'utf-8')
|
| 163 |
const pid = parseInt(pidStr.trim())
|
|
@@ -194,7 +177,7 @@ function localSpawnPython(args: string[], timeoutMs = 120000): Promise<{ code: n
|
|
| 194 |
|
| 195 |
function localSafePath(filePath: string): boolean {
|
| 196 |
const { resolve: pathResolve } = require('path')
|
| 197 |
-
const { PAPER_DIR } = getLocalPaths(
|
| 198 |
const resolved = pathResolve(filePath)
|
| 199 |
return resolved.startsWith(PAPER_DIR)
|
| 200 |
}
|
|
@@ -224,88 +207,23 @@ async function localReadJson(path: string): Promise<Record<string, unknown> | nu
|
|
| 224 |
} catch { return null }
|
| 225 |
}
|
| 226 |
|
| 227 |
-
async function
|
| 228 |
-
const
|
| 229 |
-
const [
|
| 230 |
-
localReadJson(SCAN_FILE),
|
| 231 |
-
localReadJson(SCAN_WORK_FILE),
|
| 232 |
-
])
|
| 233 |
-
|
| 234 |
-
if (!workData) return finalData
|
| 235 |
-
if (!finalData) return workData
|
| 236 |
-
|
| 237 |
-
const finalTimestamp = Math.max(
|
| 238 |
-
timestampOf(finalData.updated_at),
|
| 239 |
-
timestampOf(finalData.scan_finished),
|
| 240 |
-
timestampOf(finalData.scan_started),
|
| 241 |
-
timestampOf(finalData.timestamp),
|
| 242 |
-
)
|
| 243 |
-
const workTimestamp = Math.max(
|
| 244 |
-
timestampOf(workData.updated_at),
|
| 245 |
-
timestampOf(workData.scan_finished),
|
| 246 |
-
timestampOf(workData.scan_started),
|
| 247 |
-
timestampOf(workData.timestamp),
|
| 248 |
-
)
|
| 249 |
-
|
| 250 |
-
return !workData.completed || workTimestamp >= finalTimestamp ? workData : finalData
|
| 251 |
-
}
|
| 252 |
-
|
| 253 |
-
function mapPyMarketSnapshot(snap: Record<string, unknown>) {
|
| 254 |
-
// Maps Python _build_market_snapshot() output (snake_case SQLite rows) to TS camelCase interfaces.
|
| 255 |
-
const rawOpen: Record<string, unknown>[] = (snap.openTrades ?? []) as Record<string, unknown>[]
|
| 256 |
-
const rawClosed: Record<string, unknown>[] = (snap.closedTrades ?? []) as Record<string, unknown>[]
|
| 257 |
-
const rawSignals: Record<string, unknown>[] = (snap.signals ?? []) as Record<string, unknown>[]
|
| 258 |
-
const rawEquity: Record<string, unknown>[] = (snap.equityCurve ?? []) as Record<string, unknown>[]
|
| 259 |
-
const perf: Record<string, unknown> = (snap.performance ?? {}) as Record<string, unknown>
|
| 260 |
-
|
| 261 |
-
return {
|
| 262 |
-
openTrades: rawOpen.map((t) => ({
|
| 263 |
-
symbol: t.symbol, quantity: t.quantity,
|
| 264 |
-
entryPrice: t.entry_price, entryDate: t.entry_date,
|
| 265 |
-
confidence: t.signal_confidence, predictedReturn: t.predicted_return,
|
| 266 |
-
commission: t.entry_commission,
|
| 267 |
-
})),
|
| 268 |
-
closedTrades: rawClosed.map((t) => ({
|
| 269 |
-
symbol: t.symbol, quantity: t.quantity,
|
| 270 |
-
entryPrice: t.entry_price, exitPrice: t.exit_price,
|
| 271 |
-
entryDate: t.entry_date, exitDate: t.exit_date,
|
| 272 |
-
netPnl: t.net_pnl, returnPct: t.return_pct,
|
| 273 |
-
holdingDays: t.holding_days, exitReason: t.exit_reason,
|
| 274 |
-
})),
|
| 275 |
-
signals: rawSignals.map((s) => ({
|
| 276 |
-
date: s.date, symbol: s.symbol, signal: s.signal,
|
| 277 |
-
mlSignal: s.ml_signal, techSignal: s.tech_signal,
|
| 278 |
-
confidence: s.confidence, predictedReturn: s.predicted_return,
|
| 279 |
-
actionTaken: s.action_taken,
|
| 280 |
-
})),
|
| 281 |
-
equityCurve: rawEquity.map((e) => ({
|
| 282 |
-
date: e.date, equity: e.equity, cash: e.cash,
|
| 283 |
-
positions: e.positions_count, realizedPnl: e.realized_pnl_today ?? 0,
|
| 284 |
-
})),
|
| 285 |
-
performance: {
|
| 286 |
-
totalPnl: Number(perf.total_pnl ?? 0),
|
| 287 |
-
closedTradesCount: Number(perf.total_trades ?? rawClosed.length),
|
| 288 |
-
openTradesCount: rawOpen.length,
|
| 289 |
-
winRate: Number(perf.win_rate_pct ?? 0),
|
| 290 |
-
profitFactor: perf.profit_factor === null ? 0 : Number(perf.profit_factor ?? 0),
|
| 291 |
-
avgWin: Number(perf.avg_win_pnl ?? 0),
|
| 292 |
-
avgLoss: Math.abs(Number(perf.avg_loss_pnl ?? 0)),
|
| 293 |
-
bestTrade: 0,
|
| 294 |
-
worstTrade: 0,
|
| 295 |
-
symbolBreakdown: [],
|
| 296 |
-
},
|
| 297 |
-
}
|
| 298 |
-
}
|
| 299 |
-
|
| 300 |
-
async function buildLocalMarketSummary(marketId: MarketId) {
|
| 301 |
-
const paths = getLocalPaths(marketId)
|
| 302 |
-
const [state, status, scanData, workerStatus] = await Promise.all([
|
| 303 |
localReadJson(paths.STATE_FILE),
|
| 304 |
localReadJson(paths.STATUS_FILE),
|
| 305 |
-
|
| 306 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 307 |
])
|
| 308 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 309 |
let eligibleStocks: string[] = []
|
| 310 |
try {
|
| 311 |
const s2 = (scanData as Record<string, unknown>)?.stage2 as Record<string, { eligible?: boolean }> | undefined
|
|
@@ -314,97 +232,6 @@ async function buildLocalMarketSummary(marketId: MarketId) {
|
|
| 314 |
}
|
| 315 |
} catch { /* ignore */ }
|
| 316 |
|
| 317 |
-
const positions = (state?.broker_positions ?? {}) as Record<string, BrokerPosition>
|
| 318 |
-
const cash = Number(state?.broker_cash ?? 100000)
|
| 319 |
-
const stage1 = ((scanData as Record<string, unknown>)?.stage1 ?? {}) as Record<string, unknown>
|
| 320 |
-
const stage2 = ((scanData as Record<string, unknown>)?.stage2 ?? {}) as Record<string, unknown>
|
| 321 |
-
|
| 322 |
-
const baseSummary = {
|
| 323 |
-
marketId,
|
| 324 |
-
displayName: marketId === 'bist' ? 'Borsa Istanbul' : 'US Equities',
|
| 325 |
-
currency: marketId === 'bist' ? 'TRY' : 'USD',
|
| 326 |
-
status: {
|
| 327 |
-
isRunning: status?.is_running ?? false,
|
| 328 |
-
workerRunning: workerStatus.running,
|
| 329 |
-
workerPid: workerStatus.pid,
|
| 330 |
-
lastRunDate: state?.last_run_date ?? null,
|
| 331 |
-
totalDaysRun: state?.total_days_run ?? 0,
|
| 332 |
-
totalTrades: state?.total_trades ?? 0,
|
| 333 |
-
currentPhase: status?.current_phase ?? null,
|
| 334 |
-
lastWorkerStatus: status?.status ?? (status?.last_result as Record<string, unknown>)?.status ?? null,
|
| 335 |
-
lastWorkerReason: status?.reason ?? (status?.last_result as Record<string, unknown>)?.reason ?? null,
|
| 336 |
-
lastResult: status?.last_result ?? null,
|
| 337 |
-
updatedAt: status?.updated_at ?? null,
|
| 338 |
-
},
|
| 339 |
-
portfolio: {
|
| 340 |
-
cash: Math.round(cash * 100) / 100,
|
| 341 |
-
equity: Math.round(cash * 100) / 100,
|
| 342 |
-
positionCount: Object.keys(positions).length,
|
| 343 |
-
positions: (Object.entries(positions) as [string, BrokerPosition][]).map(([sym, pos]) => ({
|
| 344 |
-
symbol: sym, quantity: pos.qty, avgCost: pos.avg_cost, entryDate: pos.entry_date,
|
| 345 |
-
notional: (pos.qty || 0) * (pos.avg_cost || 0),
|
| 346 |
-
})),
|
| 347 |
-
pnlPct: 0,
|
| 348 |
-
unrealizedPnl: 0,
|
| 349 |
-
},
|
| 350 |
-
scan: {
|
| 351 |
-
universe: (scanData as Record<string, unknown>)?.universe ?? null,
|
| 352 |
-
completed: Boolean((scanData as Record<string, unknown>)?.completed ?? false),
|
| 353 |
-
stage1Count: Object.keys(stage1).length,
|
| 354 |
-
stage2Count: Object.keys(stage2).length,
|
| 355 |
-
updatedAt: (scanData as Record<string, unknown>)?.updated_at ?? null,
|
| 356 |
-
},
|
| 357 |
-
eligibleStocks,
|
| 358 |
-
openTrades: [] as unknown[],
|
| 359 |
-
closedTrades: [] as unknown[],
|
| 360 |
-
signals: [] as unknown[],
|
| 361 |
-
equityCurve: [] as unknown[],
|
| 362 |
-
performance: null as unknown,
|
| 363 |
-
}
|
| 364 |
-
|
| 365 |
-
// For non-BIST markets: call Python _build_market_snapshot to read SQLite data
|
| 366 |
-
if (marketId !== 'bist') {
|
| 367 |
-
try {
|
| 368 |
-
const pyResult = await localSpawnPython([
|
| 369 |
-
'-c',
|
| 370 |
-
`from trading.worker import _build_market_snapshot; import json; print(json.dumps(_build_market_snapshot('${marketId}'), default=str))`,
|
| 371 |
-
], 25000)
|
| 372 |
-
if (pyResult.code === 0 && pyResult.stdout.trim()) {
|
| 373 |
-
const snap = JSON.parse(pyResult.stdout.trim()) as Record<string, unknown>
|
| 374 |
-
const mapped = mapPyMarketSnapshot(snap as Record<string, unknown[]>)
|
| 375 |
-
// Use Python-derived portfolio (has equity, pnlPct, unrealizedPnl)
|
| 376 |
-
const pyPortfolio = snap.portfolio as Record<string, unknown> | undefined
|
| 377 |
-
return {
|
| 378 |
-
...baseSummary,
|
| 379 |
-
portfolio: pyPortfolio ? {
|
| 380 |
-
cash: pyPortfolio.cash as number ?? baseSummary.portfolio.cash,
|
| 381 |
-
equity: pyPortfolio.equity as number ?? baseSummary.portfolio.cash,
|
| 382 |
-
positionCount: pyPortfolio.positionCount as number ?? baseSummary.portfolio.positionCount,
|
| 383 |
-
positions: (pyPortfolio.positions as unknown[]) ?? baseSummary.portfolio.positions,
|
| 384 |
-
pnlPct: pyPortfolio.pnlPct as number ?? 0,
|
| 385 |
-
unrealizedPnl: pyPortfolio.unrealizedPnl as number ?? 0,
|
| 386 |
-
} : baseSummary.portfolio,
|
| 387 |
-
...mapped,
|
| 388 |
-
}
|
| 389 |
-
}
|
| 390 |
-
} catch { /* fall through to base summary */ }
|
| 391 |
-
}
|
| 392 |
-
|
| 393 |
-
return baseSummary
|
| 394 |
-
}
|
| 395 |
-
|
| 396 |
-
async function localGetHandler(): Promise<NextResponse> {
|
| 397 |
-
const paths = getLocalPaths('bist')
|
| 398 |
-
const [defaultMarket, rawTrades, equity, signals, killSwitch, markets] = await Promise.all([
|
| 399 |
-
buildLocalMarketSummary('bist'),
|
| 400 |
-
localReadJsonl(paths.TRADES_FILE),
|
| 401 |
-
localReadJsonl(paths.EQUITY_FILE),
|
| 402 |
-
localReadJsonl(paths.SIGNALS_FILE),
|
| 403 |
-
localReadJson(paths.KILL_FILE),
|
| 404 |
-
Promise.all(SUPPORTED_MARKETS.map(async (marketId) => [marketId, await buildLocalMarketSummary(marketId)] as const)),
|
| 405 |
-
])
|
| 406 |
-
const marketMap = Object.fromEntries(markets)
|
| 407 |
-
|
| 408 |
const trades = rawTrades as unknown as TradeRecord[]
|
| 409 |
const openTrades = trades.filter((t) => !t.is_closed)
|
| 410 |
const closedTrades = trades.filter((t) => t.is_closed)
|
|
@@ -427,30 +254,34 @@ async function localGetHandler(): Promise<NextResponse> {
|
|
| 427 |
if ((t.net_pnl || 0) > 0) symbolStats[t.symbol].wins++
|
| 428 |
})
|
| 429 |
|
| 430 |
-
const cash = Number(
|
| 431 |
-
const
|
|
|
|
| 432 |
const latestEquity = equity.length > 0 ? (equity[equity.length - 1] as unknown as EquityPoint) : null
|
| 433 |
|
| 434 |
return NextResponse.json({
|
| 435 |
status: {
|
| 436 |
-
isRunning:
|
| 437 |
-
lastRunDate:
|
| 438 |
-
totalDaysRun:
|
| 439 |
-
totalTrades:
|
| 440 |
-
currentPhase:
|
| 441 |
killSwitchActive: killSwitch?.active ?? false,
|
| 442 |
killSwitchReason: killSwitch?.reason ?? null,
|
| 443 |
-
workerRunning:
|
| 444 |
-
workerPid:
|
| 445 |
-
lastWorkerStatus
|
| 446 |
-
lastWorkerReason
|
| 447 |
-
lastResult
|
| 448 |
},
|
| 449 |
portfolio: {
|
| 450 |
cash: Math.round(cash * 100) / 100,
|
| 451 |
equity: latestEquity?.equity ?? cash,
|
| 452 |
positionCount,
|
| 453 |
-
positions:
|
|
|
|
|
|
|
|
|
|
| 454 |
pnlPct: latestEquity ? Math.round(((latestEquity.equity / 100000 - 1) * 100) * 100) / 100 : 0,
|
| 455 |
unrealizedPnl: latestEquity?.unrealized_pnl ?? 0,
|
| 456 |
},
|
|
@@ -485,24 +316,21 @@ async function localGetHandler(): Promise<NextResponse> {
|
|
| 485 |
date: s.date, symbol: s.symbol, signal: s.signal, mlSignal: s.ml_signal, techSignal: s.tech_signal,
|
| 486 |
confidence: s.confidence, predictedReturn: s.predicted_return, actionTaken: s.action_taken,
|
| 487 |
})),
|
| 488 |
-
eligibleStocks
|
| 489 |
-
markets: marketMap,
|
| 490 |
-
activeMarketId: SUPPORTED_MARKETS.find((m) => marketMap[m]?.status?.workerRunning) ?? Object.keys(marketMap)[0] ?? 'bist',
|
| 491 |
timestamp: new Date().toISOString(),
|
| 492 |
})
|
| 493 |
}
|
| 494 |
|
| 495 |
async function localPostHandler(body: Record<string, unknown>): Promise<NextResponse> {
|
| 496 |
const { writeFile, mkdir, unlink } = getLocalModules()
|
| 497 |
-
const
|
| 498 |
-
const paths = getLocalPaths(marketId)
|
| 499 |
const { spawn } = getLocalModules()
|
| 500 |
const action = body.action as string
|
| 501 |
|
| 502 |
if (!action) return NextResponse.json({ error: 'Missing action' }, { status: 400 })
|
| 503 |
|
| 504 |
if (action === 'run') {
|
| 505 |
-
const result = await localSpawnPython(['-m', 'trading.worker', '--cycle', '--mode', 'paper'
|
| 506 |
return NextResponse.json({
|
| 507 |
success: result.code === 0, output: result.stdout,
|
| 508 |
error: result.code !== 0 ? (result.stderr || 'Trading cycle failed') : undefined,
|
|
@@ -511,7 +339,7 @@ async function localPostHandler(body: Record<string, unknown>): Promise<NextResp
|
|
| 511 |
}
|
| 512 |
|
| 513 |
if (action === 'run_force') {
|
| 514 |
-
const result = await localSpawnPython(['-m', 'trading.worker', '--cycle', '--mode', 'paper', '--force'
|
| 515 |
return NextResponse.json({
|
| 516 |
success: result.code === 0, output: result.stdout,
|
| 517 |
error: result.code !== 0 ? (result.stderr || 'Trading cycle failed') : undefined,
|
|
@@ -519,64 +347,41 @@ async function localPostHandler(body: Record<string, unknown>): Promise<NextResp
|
|
| 519 |
})
|
| 520 |
}
|
| 521 |
|
| 522 |
-
if (action === 'scan') {
|
| 523 |
-
const defaultUniverse = marketId === 'us' ? 'sp100' : 'bist30'
|
| 524 |
-
const requestedUniverse = String(body.universe || body.reason || defaultUniverse).trim().toLowerCase() || defaultUniverse
|
| 525 |
-
const scriptName = marketId === 'us' ? 'run_us_scan.py' : 'run_bist100_scan.py'
|
| 526 |
-
const args = [scriptName, '--universe', requestedUniverse]
|
| 527 |
-
if (body.force === true) args.push('--force')
|
| 528 |
-
|
| 529 |
-
const proc = spawn('python3', args, {
|
| 530 |
-
cwd: paths.PROJECT_ROOT,
|
| 531 |
-
detached: true,
|
| 532 |
-
stdio: 'ignore',
|
| 533 |
-
env: { ...process.env },
|
| 534 |
-
})
|
| 535 |
-
proc.unref()
|
| 536 |
-
|
| 537 |
-
return NextResponse.json({
|
| 538 |
-
success: true,
|
| 539 |
-
market: marketId,
|
| 540 |
-
universe: requestedUniverse,
|
| 541 |
-
message: `${marketId.toUpperCase()} ${requestedUniverse.toUpperCase()} scan started in background`,
|
| 542 |
-
})
|
| 543 |
-
}
|
| 544 |
-
|
| 545 |
if (action === 'reset') {
|
| 546 |
return NextResponse.json({
|
| 547 |
-
error: '
|
| 548 |
-
hint: '
|
| 549 |
}, { status: 403 })
|
| 550 |
}
|
| 551 |
|
| 552 |
if (action === 'start_worker') {
|
| 553 |
-
const ws = await localIsWorkerRunning(
|
| 554 |
-
if (ws.running) return NextResponse.json({ error: 'Worker
|
| 555 |
-
const proc = spawn('python3', ['-m', 'trading.worker', '--daemon', '--mode', 'paper'
|
| 556 |
cwd: paths.PROJECT_ROOT, detached: true, stdio: 'ignore', env: { ...process.env },
|
| 557 |
})
|
| 558 |
proc.unref()
|
| 559 |
await new Promise((r: (v: unknown) => void) => setTimeout(r, 3000))
|
| 560 |
-
const check = await localIsWorkerRunning(
|
| 561 |
return NextResponse.json({ success: true, pid: proc.pid, workerRunning: check.running })
|
| 562 |
}
|
| 563 |
|
| 564 |
if (action === 'stop_worker') {
|
| 565 |
-
const ws = await localIsWorkerRunning(
|
| 566 |
-
if (!ws.running || !ws.pid) return NextResponse.json({ error: 'Worker
|
| 567 |
try {
|
| 568 |
process.kill(ws.pid, 'SIGTERM')
|
| 569 |
await new Promise((r: (v: unknown) => void) => setTimeout(r, 2000))
|
| 570 |
-
return NextResponse.json({ success: true, message: 'Worker
|
| 571 |
} catch {
|
| 572 |
-
return NextResponse.json({ error: '
|
| 573 |
}
|
| 574 |
}
|
| 575 |
|
| 576 |
if (action === 'kill') {
|
| 577 |
if (!localSafePath(paths.KILL_FILE)) return NextResponse.json({ error: 'Invalid path' }, { status: 400 })
|
| 578 |
-
const {
|
| 579 |
-
const dir =
|
| 580 |
await mkdir(dir, { recursive: true })
|
| 581 |
await writeFile(paths.KILL_FILE, JSON.stringify({
|
| 582 |
active: true, reason: body.reason || 'Manual kill switch from UI',
|
|
|
|
| 119 |
interface SignalRecord { symbol: string; signal: string; price?: number; date?: string; strategy?: string; reason?: string; ml_signal?: string; tech_signal?: string; confidence?: number; predicted_return?: number; action_taken?: string }
|
| 120 |
interface SymbolStat { trades: number; wins: number; pnl: number }
|
| 121 |
interface BrokerPosition { qty?: number; avg_cost?: number; entry_date?: string }
|
|
|
|
|
|
|
| 122 |
|
| 123 |
+
function getLocalPaths() {
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 124 |
const { join, resolve } = require('path')
|
| 125 |
+
const PROJECT_ROOT = resolve(process.cwd(), '..')
|
| 126 |
const PAPER_DIR = join(PROJECT_ROOT, 'paper_trading')
|
|
|
|
|
|
|
| 127 |
return {
|
| 128 |
PROJECT_ROOT,
|
| 129 |
PAPER_DIR,
|
| 130 |
+
STATE_FILE: join(PAPER_DIR, 'auto_trader', 'state.json'),
|
| 131 |
+
STATUS_FILE: join(PAPER_DIR, 'auto_trader', 'status.json'),
|
| 132 |
+
TRADES_FILE: join(PAPER_DIR, 'journal', 'trades.jsonl'),
|
| 133 |
+
EQUITY_FILE: join(PAPER_DIR, 'journal', 'daily_pnl.jsonl'),
|
| 134 |
+
SIGNALS_FILE: join(PAPER_DIR, 'journal', 'signals_log.jsonl'),
|
| 135 |
+
KILL_FILE: join(PAPER_DIR, 'kill_switch.json'),
|
| 136 |
+
SCAN_FILE: join(PAPER_DIR, 'bist100_scan_results.json'),
|
| 137 |
+
PID_FILE: join(PAPER_DIR, 'auto_trader', 'worker.pid'),
|
|
|
|
|
|
|
|
|
|
| 138 |
}
|
| 139 |
}
|
| 140 |
|
| 141 |
+
async function localIsWorkerRunning(): Promise<{ running: boolean; pid: number | null }> {
|
| 142 |
const { readFile } = getLocalModules()
|
| 143 |
+
const { PID_FILE } = getLocalPaths()
|
| 144 |
try {
|
| 145 |
const pidStr = await readFile(PID_FILE, 'utf-8')
|
| 146 |
const pid = parseInt(pidStr.trim())
|
|
|
|
| 177 |
|
| 178 |
function localSafePath(filePath: string): boolean {
|
| 179 |
const { resolve: pathResolve } = require('path')
|
| 180 |
+
const { PAPER_DIR } = getLocalPaths()
|
| 181 |
const resolved = pathResolve(filePath)
|
| 182 |
return resolved.startsWith(PAPER_DIR)
|
| 183 |
}
|
|
|
|
| 207 |
} catch { return null }
|
| 208 |
}
|
| 209 |
|
| 210 |
+
async function localGetHandler(): Promise<NextResponse> {
|
| 211 |
+
const paths = getLocalPaths()
|
| 212 |
+
const [state, status, rawTrades, equity, signals, killSwitch, scanData, workerStatus] = await Promise.all([
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 213 |
localReadJson(paths.STATE_FILE),
|
| 214 |
localReadJson(paths.STATUS_FILE),
|
| 215 |
+
localReadJsonl(paths.TRADES_FILE),
|
| 216 |
+
localReadJsonl(paths.EQUITY_FILE),
|
| 217 |
+
localReadJsonl(paths.SIGNALS_FILE),
|
| 218 |
+
localReadJson(paths.KILL_FILE),
|
| 219 |
+
localReadJson(paths.SCAN_FILE),
|
| 220 |
+
localIsWorkerRunning(),
|
| 221 |
])
|
| 222 |
|
| 223 |
+
const lastWorkerStatus = (status as any)?.status ?? (status as any)?.last_result?.status ?? null
|
| 224 |
+
const lastWorkerReason = (status as any)?.reason ?? (status as any)?.last_result?.reason ?? null
|
| 225 |
+
const lastResult = (status as any)?.last_result ?? null
|
| 226 |
+
|
| 227 |
let eligibleStocks: string[] = []
|
| 228 |
try {
|
| 229 |
const s2 = (scanData as Record<string, unknown>)?.stage2 as Record<string, { eligible?: boolean }> | undefined
|
|
|
|
| 232 |
}
|
| 233 |
} catch { /* ignore */ }
|
| 234 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 235 |
const trades = rawTrades as unknown as TradeRecord[]
|
| 236 |
const openTrades = trades.filter((t) => !t.is_closed)
|
| 237 |
const closedTrades = trades.filter((t) => t.is_closed)
|
|
|
|
| 254 |
if ((t.net_pnl || 0) > 0) symbolStats[t.symbol].wins++
|
| 255 |
})
|
| 256 |
|
| 257 |
+
const cash = Number(state?.broker_cash ?? 100000)
|
| 258 |
+
const positions = (state?.broker_positions ?? {}) as Record<string, BrokerPosition>
|
| 259 |
+
const positionCount = Object.keys(positions).length
|
| 260 |
const latestEquity = equity.length > 0 ? (equity[equity.length - 1] as unknown as EquityPoint) : null
|
| 261 |
|
| 262 |
return NextResponse.json({
|
| 263 |
status: {
|
| 264 |
+
isRunning: status?.is_running ?? false,
|
| 265 |
+
lastRunDate: state?.last_run_date ?? null,
|
| 266 |
+
totalDaysRun: state?.total_days_run ?? 0,
|
| 267 |
+
totalTrades: state?.total_trades ?? 0,
|
| 268 |
+
currentPhase: status?.current_phase ?? null,
|
| 269 |
killSwitchActive: killSwitch?.active ?? false,
|
| 270 |
killSwitchReason: killSwitch?.reason ?? null,
|
| 271 |
+
workerRunning: workerStatus.running,
|
| 272 |
+
workerPid: workerStatus.pid,
|
| 273 |
+
lastWorkerStatus,
|
| 274 |
+
lastWorkerReason,
|
| 275 |
+
lastResult,
|
| 276 |
},
|
| 277 |
portfolio: {
|
| 278 |
cash: Math.round(cash * 100) / 100,
|
| 279 |
equity: latestEquity?.equity ?? cash,
|
| 280 |
positionCount,
|
| 281 |
+
positions: (Object.entries(positions) as [string, BrokerPosition][]).map(([sym, pos]) => ({
|
| 282 |
+
symbol: sym, quantity: pos.qty, avgCost: pos.avg_cost, entryDate: pos.entry_date,
|
| 283 |
+
notional: (pos.qty || 0) * (pos.avg_cost || 0),
|
| 284 |
+
})),
|
| 285 |
pnlPct: latestEquity ? Math.round(((latestEquity.equity / 100000 - 1) * 100) * 100) / 100 : 0,
|
| 286 |
unrealizedPnl: latestEquity?.unrealized_pnl ?? 0,
|
| 287 |
},
|
|
|
|
| 316 |
date: s.date, symbol: s.symbol, signal: s.signal, mlSignal: s.ml_signal, techSignal: s.tech_signal,
|
| 317 |
confidence: s.confidence, predictedReturn: s.predicted_return, actionTaken: s.action_taken,
|
| 318 |
})),
|
| 319 |
+
eligibleStocks,
|
|
|
|
|
|
|
| 320 |
timestamp: new Date().toISOString(),
|
| 321 |
})
|
| 322 |
}
|
| 323 |
|
| 324 |
async function localPostHandler(body: Record<string, unknown>): Promise<NextResponse> {
|
| 325 |
const { writeFile, mkdir, unlink } = getLocalModules()
|
| 326 |
+
const paths = getLocalPaths()
|
|
|
|
| 327 |
const { spawn } = getLocalModules()
|
| 328 |
const action = body.action as string
|
| 329 |
|
| 330 |
if (!action) return NextResponse.json({ error: 'Missing action' }, { status: 400 })
|
| 331 |
|
| 332 |
if (action === 'run') {
|
| 333 |
+
const result = await localSpawnPython(['-m', 'trading.worker', '--cycle', '--mode', 'paper'], 120000)
|
| 334 |
return NextResponse.json({
|
| 335 |
success: result.code === 0, output: result.stdout,
|
| 336 |
error: result.code !== 0 ? (result.stderr || 'Trading cycle failed') : undefined,
|
|
|
|
| 339 |
}
|
| 340 |
|
| 341 |
if (action === 'run_force') {
|
| 342 |
+
const result = await localSpawnPython(['-m', 'trading.worker', '--cycle', '--mode', 'paper', '--force'], 120000)
|
| 343 |
return NextResponse.json({
|
| 344 |
success: result.code === 0, output: result.stdout,
|
| 345 |
error: result.code !== 0 ? (result.stderr || 'Trading cycle failed') : undefined,
|
|
|
|
| 347 |
})
|
| 348 |
}
|
| 349 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 350 |
if (action === 'reset') {
|
| 351 |
return NextResponse.json({
|
| 352 |
+
error: 'Portföy sıfırlama CLI gerektirir: python -m trading.auto_trader --reset',
|
| 353 |
+
hint: 'Güvenlik nedeniyle portföy sıfırlama web arayüzünden yapılamaz.',
|
| 354 |
}, { status: 403 })
|
| 355 |
}
|
| 356 |
|
| 357 |
if (action === 'start_worker') {
|
| 358 |
+
const ws = await localIsWorkerRunning()
|
| 359 |
+
if (ws.running) return NextResponse.json({ error: 'Worker zaten çalışıyor', pid: ws.pid }, { status: 409 })
|
| 360 |
+
const proc = spawn('python3', ['-m', 'trading.worker', '--daemon', '--mode', 'paper'], {
|
| 361 |
cwd: paths.PROJECT_ROOT, detached: true, stdio: 'ignore', env: { ...process.env },
|
| 362 |
})
|
| 363 |
proc.unref()
|
| 364 |
await new Promise((r: (v: unknown) => void) => setTimeout(r, 3000))
|
| 365 |
+
const check = await localIsWorkerRunning()
|
| 366 |
return NextResponse.json({ success: true, pid: proc.pid, workerRunning: check.running })
|
| 367 |
}
|
| 368 |
|
| 369 |
if (action === 'stop_worker') {
|
| 370 |
+
const ws = await localIsWorkerRunning()
|
| 371 |
+
if (!ws.running || !ws.pid) return NextResponse.json({ error: 'Worker çalışmıyor' }, { status: 404 })
|
| 372 |
try {
|
| 373 |
process.kill(ws.pid, 'SIGTERM')
|
| 374 |
await new Promise((r: (v: unknown) => void) => setTimeout(r, 2000))
|
| 375 |
+
return NextResponse.json({ success: true, message: 'Worker durduruldu' })
|
| 376 |
} catch {
|
| 377 |
+
return NextResponse.json({ error: 'Worker durdurma hatası' }, { status: 500 })
|
| 378 |
}
|
| 379 |
}
|
| 380 |
|
| 381 |
if (action === 'kill') {
|
| 382 |
if (!localSafePath(paths.KILL_FILE)) return NextResponse.json({ error: 'Invalid path' }, { status: 400 })
|
| 383 |
+
const { join } = require('path')
|
| 384 |
+
const dir = join(paths.PAPER_DIR, 'auto_trader')
|
| 385 |
await mkdir(dir, { recursive: true })
|
| 386 |
await writeFile(paths.KILL_FILE, JSON.stringify({
|
| 387 |
active: true, reason: body.reason || 'Manual kill switch from UI',
|
huggingface-space/nextjs-app/src/app/auto-trading/page.tsx
CHANGED
|
@@ -2,7 +2,6 @@
|
|
| 2 |
|
| 3 |
import { useState, useEffect, useCallback } from 'react'
|
| 4 |
import { fetchJson } from '@/lib/http'
|
| 5 |
-
import { useMarket } from '@/contexts/MarketContext'
|
| 6 |
import PositionChart from '@/components/PositionChart'
|
| 7 |
import {
|
| 8 |
Play,
|
|
@@ -120,40 +119,6 @@ interface Signal {
|
|
| 120 |
actionTaken: string
|
| 121 |
}
|
| 122 |
|
| 123 |
-
interface MarketSummary {
|
| 124 |
-
marketId: string
|
| 125 |
-
displayName: string
|
| 126 |
-
currency: string
|
| 127 |
-
status: {
|
| 128 |
-
isRunning: boolean
|
| 129 |
-
workerRunning: boolean
|
| 130 |
-
workerPid: number | null
|
| 131 |
-
lastRunDate: string | null
|
| 132 |
-
totalDaysRun: number
|
| 133 |
-
totalTrades: number
|
| 134 |
-
currentPhase: string | null
|
| 135 |
-
killSwitchActive?: boolean
|
| 136 |
-
killSwitchReason?: string | null
|
| 137 |
-
lastWorkerStatus?: string | null
|
| 138 |
-
lastWorkerReason?: string | null
|
| 139 |
-
lastResult?: { status?: string; reason?: string; date?: string } | null
|
| 140 |
-
}
|
| 141 |
-
portfolio: Portfolio
|
| 142 |
-
scan: {
|
| 143 |
-
universe: string | null
|
| 144 |
-
completed: boolean
|
| 145 |
-
stage1Count: number
|
| 146 |
-
stage2Count: number
|
| 147 |
-
updatedAt: string | null
|
| 148 |
-
}
|
| 149 |
-
openTrades?: OpenTrade[]
|
| 150 |
-
closedTrades?: ClosedTrade[]
|
| 151 |
-
equityCurve?: EquityPoint[]
|
| 152 |
-
signals?: Signal[]
|
| 153 |
-
performance?: Performance
|
| 154 |
-
eligibleStocks: string[]
|
| 155 |
-
}
|
| 156 |
-
|
| 157 |
interface TradingData {
|
| 158 |
status: TradingStatus
|
| 159 |
portfolio: Portfolio
|
|
@@ -163,102 +128,22 @@ interface TradingData {
|
|
| 163 |
performance: Performance
|
| 164 |
signals: Signal[]
|
| 165 |
eligibleStocks: string[]
|
| 166 |
-
markets?: Record<string, MarketSummary>
|
| 167 |
-
activeMarketId?: string
|
| 168 |
timestamp: string
|
| 169 |
error?: string
|
| 170 |
}
|
| 171 |
|
| 172 |
// ─── Formatting Helpers ────────────────────
|
| 173 |
-
function fmtMoney(n: number
|
| 174 |
-
|
| 175 |
-
const currency = isUS ? 'USD' : 'TRY'
|
| 176 |
-
return new Intl.NumberFormat(locale, { style: 'currency', currency }).format(n)
|
| 177 |
-
}
|
| 178 |
-
|
| 179 |
-
function fmtMoneyByCurrency(n: number, currency: string, isUS = false) {
|
| 180 |
-
const locale = isUS ? 'en-US' : 'tr-TR'
|
| 181 |
-
return new Intl.NumberFormat(locale, { style: 'currency', currency }).format(n)
|
| 182 |
}
|
| 183 |
-
|
| 184 |
function fmtPct(n: number) {
|
| 185 |
return `%${n >= 0 ? '+' : ''}${n.toFixed(2)}`
|
| 186 |
}
|
| 187 |
|
| 188 |
-
function describeSignalAction(action: string, isUS = false) {
|
| 189 |
-
const a = (action || '').toUpperCase()
|
| 190 |
-
const t = (tr: string, en: string) => isUS ? en : tr
|
| 191 |
-
switch (a) {
|
| 192 |
-
case 'BUY_EXECUTED':
|
| 193 |
-
return t('Alım yapıldı', 'Buy executed')
|
| 194 |
-
case 'BUY_REJECTED':
|
| 195 |
-
return t('Alım reddedildi', 'Buy rejected')
|
| 196 |
-
case 'BUY_SIGNAL':
|
| 197 |
-
return t('Alım adayı', 'Buy candidate')
|
| 198 |
-
case 'BUY_BELOW_CONFIDENCE':
|
| 199 |
-
return t('Alım, güven eşiği altında', 'Buy, below confidence')
|
| 200 |
-
case 'BUY_SKIPPED_NO_SLOT':
|
| 201 |
-
return t('Alım, slot yok', 'Buy, no slot')
|
| 202 |
-
case 'BUY_SKIPPED_LOW_PRIORITY':
|
| 203 |
-
return t('Alım, düşük öncelik', 'Buy, low priority')
|
| 204 |
-
case 'BUY_SKIPPED_ALREADY_HELD':
|
| 205 |
-
return t('Alım, zaten pozisyonda', 'Buy, already held')
|
| 206 |
-
case 'BUY_SKIPPED_INVALID_PRICE':
|
| 207 |
-
return t('Alım, fiyat geçersiz', 'Buy, invalid price')
|
| 208 |
-
case 'BUY_SKIPPED_CORRELATED':
|
| 209 |
-
return t('Alım, korelasyon nedeniyle atlandı', 'Buy, skipped (correlated)')
|
| 210 |
-
case 'BUY_SKIPPED_ZERO_QTY':
|
| 211 |
-
return t('Alım, adet sıfıra yuvarlandı', 'Buy, zero qty')
|
| 212 |
-
case 'BUY_BLOCKED_RISK_GATE':
|
| 213 |
-
return t('Alım, risk kapısında engellendi', 'Buy, blocked by risk gate')
|
| 214 |
-
case 'BUY_BLOCKED_CIRCUIT_BREAKER':
|
| 215 |
-
return t('Alım, circuit breaker engeli', 'Buy, circuit breaker')
|
| 216 |
-
case 'SELL_EXECUTED:ML_SELL_SIGNAL':
|
| 217 |
-
return t('Satış yapıldı, ML sell', 'Sell executed, ML signal')
|
| 218 |
-
case 'SELL_EXECUTED:STOP_LOSS':
|
| 219 |
-
return t('Satış yapıldı, stop-loss', 'Sell executed, stop-loss')
|
| 220 |
-
case 'SELL_EXECUTED:TAKE_PROFIT':
|
| 221 |
-
return t('Satış yapıldı, take-profit', 'Sell executed, take-profit')
|
| 222 |
-
case 'SELL_EXECUTED:HOLDING_PERIOD_EXPIRY':
|
| 223 |
-
return t('Satış yapıldı, süre doldu', 'Sell executed, holding expired')
|
| 224 |
-
case 'SELL_SIGNAL':
|
| 225 |
-
return t('Satış sinyali', 'Sell signal')
|
| 226 |
-
case 'SELL_NO_POSITION':
|
| 227 |
-
return t('Satış sinyali, pozisyon yok', 'Sell signal, no position')
|
| 228 |
-
case 'SELL_BLOCKED_RISK_GATE':
|
| 229 |
-
return t('Satış, risk kapısında engellendi', 'Sell, blocked by risk gate')
|
| 230 |
-
case 'SELL_REJECTED':
|
| 231 |
-
return t('Satış reddedildi', 'Sell rejected')
|
| 232 |
-
case 'HOLD':
|
| 233 |
-
return t('Bekle', 'Hold')
|
| 234 |
-
case 'PENDING':
|
| 235 |
-
return t('Eski kayıt, sonuç yazılmadı', 'Old record, no result')
|
| 236 |
-
default:
|
| 237 |
-
return action || '—'
|
| 238 |
-
}
|
| 239 |
-
}
|
| 240 |
-
|
| 241 |
// ─── Tab Type ──────────────────────────────
|
| 242 |
type TabKey = 'portfolio' | 'trades' | 'signals' | 'performance'
|
| 243 |
|
| 244 |
-
const EMPTY_PERFORMANCE: Performance = {
|
| 245 |
-
totalPnl: 0,
|
| 246 |
-
closedTradesCount: 0,
|
| 247 |
-
openTradesCount: 0,
|
| 248 |
-
winRate: 0,
|
| 249 |
-
profitFactor: 0,
|
| 250 |
-
avgWin: 0,
|
| 251 |
-
avgLoss: 0,
|
| 252 |
-
bestTrade: 0,
|
| 253 |
-
worstTrade: 0,
|
| 254 |
-
symbolBreakdown: [],
|
| 255 |
-
}
|
| 256 |
-
|
| 257 |
export default function AutoTradingPage() {
|
| 258 |
-
const { market } = useMarket()
|
| 259 |
-
const isUS = market === 'us'
|
| 260 |
-
const t = (tr: string, en: string) => isUS ? en : tr
|
| 261 |
-
|
| 262 |
const [data, setData] = useState<TradingData | null>(null)
|
| 263 |
const [loading, setLoading] = useState(true)
|
| 264 |
const [error, setError] = useState<string | null>(null)
|
|
@@ -266,12 +151,6 @@ export default function AutoTradingPage() {
|
|
| 266 |
const [actionMessage, setActionMessage] = useState<string | null>(null)
|
| 267 |
const [activeTab, setActiveTab] = useState<TabKey>('portfolio')
|
| 268 |
const [showEquity, setShowEquity] = useState(true)
|
| 269 |
-
const [selectedMarketId, setSelectedMarketId] = useState<string>(market)
|
| 270 |
-
|
| 271 |
-
// Sync selectedMarketId when global market context changes
|
| 272 |
-
useEffect(() => {
|
| 273 |
-
setSelectedMarketId(market)
|
| 274 |
-
}, [market])
|
| 275 |
|
| 276 |
const loadData = useCallback(async () => {
|
| 277 |
setLoading(true)
|
|
@@ -280,15 +159,12 @@ export default function AutoTradingPage() {
|
|
| 280 |
const res = await fetchJson<TradingData>('/api/trading', undefined, { timeoutMs: 20000 })
|
| 281 |
if (res.error) throw new Error(res.error)
|
| 282 |
setData(res)
|
| 283 |
-
// Do NOT override selectedMarketId with a fallback market.
|
| 284 |
-
// The user's explicit market selection (from global context) takes
|
| 285 |
-
// priority even when the snapshot has no data for that market.
|
| 286 |
} catch (e: unknown) {
|
| 287 |
-
setError(e instanceof Error ? e.message :
|
| 288 |
} finally {
|
| 289 |
setLoading(false)
|
| 290 |
}
|
| 291 |
-
}, [
|
| 292 |
|
| 293 |
useEffect(() => {
|
| 294 |
loadData()
|
|
@@ -307,7 +183,7 @@ export default function AutoTradingPage() {
|
|
| 307 |
}>('/api/trading', {
|
| 308 |
method: 'POST',
|
| 309 |
headers: { 'Content-Type': 'application/json' },
|
| 310 |
-
body: JSON.stringify({ action
|
| 311 |
}, { timeoutMs: 130000 })
|
| 312 |
|
| 313 |
// Handle 403 gracefully (run/reset moved to worker)
|
|
@@ -318,26 +194,26 @@ export default function AutoTradingPage() {
|
|
| 318 |
if (res.error) throw new Error(res.error)
|
| 319 |
|
| 320 |
if (action === 'kill') {
|
| 321 |
-
setActionMessage(
|
| 322 |
} else if (action === 'unkill') {
|
| 323 |
-
setActionMessage(
|
| 324 |
} else if (action === 'start_worker') {
|
| 325 |
-
setActionMessage(
|
| 326 |
} else if (action === 'stop_worker') {
|
| 327 |
-
setActionMessage(
|
| 328 |
} else if (action === 'run') {
|
| 329 |
-
setActionMessage(
|
| 330 |
} else if (action === 'run_force') {
|
| 331 |
-
setActionMessage(
|
| 332 |
} else {
|
| 333 |
-
setActionMessage(
|
| 334 |
}
|
| 335 |
|
| 336 |
// Reload data after action
|
| 337 |
await loadData()
|
| 338 |
} catch (e: unknown) {
|
| 339 |
-
const msg = e instanceof Error ? e.message :
|
| 340 |
-
setActionMessage(`
|
| 341 |
} finally {
|
| 342 |
setActionLoading(null)
|
| 343 |
}
|
|
@@ -349,7 +225,7 @@ export default function AutoTradingPage() {
|
|
| 349 |
<div className="min-h-screen bg-gray-50 flex items-center justify-center">
|
| 350 |
<div className="flex flex-col items-center gap-3">
|
| 351 |
<RefreshCw className="w-8 h-8 text-blue-400 animate-spin" />
|
| 352 |
-
<p className="text-gray-500">
|
| 353 |
</div>
|
| 354 |
</div>
|
| 355 |
)
|
|
@@ -365,7 +241,7 @@ export default function AutoTradingPage() {
|
|
| 365 |
onClick={loadData}
|
| 366 |
className="px-4 py-2 bg-red-600 hover:bg-red-500 text-white rounded-lg text-sm"
|
| 367 |
>
|
| 368 |
-
|
| 369 |
</button>
|
| 370 |
</div>
|
| 371 |
</div>
|
|
@@ -374,31 +250,9 @@ export default function AutoTradingPage() {
|
|
| 374 |
|
| 375 |
if (!data) return null
|
| 376 |
|
| 377 |
-
const { status, portfolio, performance, openTrades, closedTrades, signals, equityCurve, eligibleStocks
|
| 378 |
-
const
|
| 379 |
-
const
|
| 380 |
-
const selectedMarket = marketLookup[selectedMarketId]
|
| 381 |
-
const selectedStatus = selectedMarket
|
| 382 |
-
? {
|
| 383 |
-
...status,
|
| 384 |
-
...selectedMarket.status,
|
| 385 |
-
killSwitchActive: selectedMarket.status?.killSwitchActive ?? status.killSwitchActive,
|
| 386 |
-
killSwitchReason: selectedMarket.status?.killSwitchReason ?? status.killSwitchReason ?? null,
|
| 387 |
-
lastResult: selectedMarket.status?.lastResult || null,
|
| 388 |
-
}
|
| 389 |
-
: status
|
| 390 |
-
const selectedPortfolio = selectedMarket?.portfolio ?? (selectedMarketId !== 'us' ? portfolio : { cash: 0, equity: 0, positionCount: 0, positions: [] as Portfolio['positions'], pnlPct: 0, unrealizedPnl: 0 })
|
| 391 |
-
const selectedPerformance = selectedMarket?.performance || EMPTY_PERFORMANCE
|
| 392 |
-
const selectedOpenTrades = selectedMarket?.openTrades || []
|
| 393 |
-
const selectedClosedTrades = selectedMarket?.closedTrades || []
|
| 394 |
-
const selectedSignals = selectedMarket?.signals || []
|
| 395 |
-
const selectedEquityCurve = selectedMarket?.equityCurve || []
|
| 396 |
-
const selectedEligibleStocks = selectedMarket?.eligibleStocks || []
|
| 397 |
-
const selectedCurrency = selectedMarket?.currency || (selectedMarketId === 'us' ? 'USD' : 'TRY')
|
| 398 |
-
const selectedMarketLabel = selectedMarket?.displayName || (selectedMarketId === 'us' ? 'US Equities' : 'Borsa Istanbul')
|
| 399 |
-
const lastStatus = selectedStatus.lastWorkerStatus || selectedStatus.lastResult?.status || null
|
| 400 |
-
const lastReason = selectedStatus.lastWorkerReason || selectedStatus.lastResult?.reason || null
|
| 401 |
-
const latestSignal = selectedSignals[0] || null
|
| 402 |
|
| 403 |
// ─── Main Render ────────────────────────
|
| 404 |
return (
|
|
@@ -408,32 +262,32 @@ export default function AutoTradingPage() {
|
|
| 408 |
<div>
|
| 409 |
<h1 className="text-2xl font-bold flex items-center gap-2">
|
| 410 |
<Zap className="w-7 h-7 text-yellow-400" />
|
| 411 |
-
|
| 412 |
</h1>
|
| 413 |
<p className="text-gray-500 text-sm mt-1">
|
| 414 |
-
|
| 415 |
</p>
|
| 416 |
</div>
|
| 417 |
<div className="flex items-center gap-2 flex-wrap">
|
| 418 |
{/* Worker Status Indicator */}
|
| 419 |
<div className="flex items-center gap-2 px-4 py-2 bg-gray-100 rounded-lg text-sm">
|
| 420 |
-
<Server className={`w-4 h-4 ${
|
| 421 |
-
<span className={
|
| 422 |
-
{
|
| 423 |
</span>
|
| 424 |
</div>
|
| 425 |
|
| 426 |
{/* Kill Switch */}
|
| 427 |
-
{
|
| 428 |
<button
|
| 429 |
onClick={() => runAction('unkill')}
|
| 430 |
disabled={!!actionLoading}
|
| 431 |
className="flex items-center gap-2 px-4 py-2 bg-yellow-600 hover:bg-yellow-500 text-white rounded-lg text-sm font-medium disabled:opacity-50"
|
| 432 |
>
|
| 433 |
<Shield className="w-4 h-4" />
|
| 434 |
-
|
| 435 |
</button>
|
| 436 |
-
) :
|
| 437 |
<button
|
| 438 |
onClick={() => runAction('kill')}
|
| 439 |
disabled={!!actionLoading}
|
|
@@ -442,7 +296,7 @@ export default function AutoTradingPage() {
|
|
| 442 |
<Square className="w-4 h-4" />
|
| 443 |
Kill Switch
|
| 444 |
</button>
|
| 445 |
-
)
|
| 446 |
|
| 447 |
{/* Refresh */}
|
| 448 |
<button
|
|
@@ -454,27 +308,9 @@ export default function AutoTradingPage() {
|
|
| 454 |
</div>
|
| 455 |
</div>
|
| 456 |
|
| 457 |
-
{marketSummaries.length > 0 && (
|
| 458 |
-
<div className="flex flex-wrap gap-2 mb-6">
|
| 459 |
-
{marketSummaries.map((market) => (
|
| 460 |
-
<button
|
| 461 |
-
key={market.marketId}
|
| 462 |
-
onClick={() => setSelectedMarketId(market.marketId)}
|
| 463 |
-
className={`px-4 py-2 rounded-full text-sm font-medium border transition-colors ${
|
| 464 |
-
selectedMarketId === market.marketId
|
| 465 |
-
? 'bg-blue-600 border-blue-600 text-white'
|
| 466 |
-
: 'bg-white border-gray-200 text-gray-600 hover:border-blue-300 hover:text-blue-600'
|
| 467 |
-
}`}
|
| 468 |
-
>
|
| 469 |
-
{market.displayName}
|
| 470 |
-
</button>
|
| 471 |
-
))}
|
| 472 |
-
</div>
|
| 473 |
-
)}
|
| 474 |
-
|
| 475 |
{actionMessage && (
|
| 476 |
<div className={`border rounded-lg px-4 py-3 mb-6 text-sm ${
|
| 477 |
-
actionMessage.startsWith('Hata:') || actionMessage.
|
| 478 |
? 'bg-amber-900/30 border-amber-600/50 text-amber-200'
|
| 479 |
: 'bg-white border-gray-200 text-gray-700'
|
| 480 |
}`}>
|
|
@@ -487,23 +323,21 @@ export default function AutoTradingPage() {
|
|
| 487 |
<div className="bg-white border border-gray-200 rounded-lg p-4 mb-6">
|
| 488 |
<h3 className="font-semibold text-gray-700 mb-2 flex items-center gap-2">
|
| 489 |
<Server className="w-5 h-5 text-blue-500" />
|
| 490 |
-
|
| 491 |
</h3>
|
| 492 |
<p className="text-sm text-gray-500 mb-4">
|
| 493 |
-
|
| 494 |
-
|
| 495 |
-
'Worker generates ML signals on eligible stocks for the selected market and paper trades automatically. Runs once daily. Manages positions with stop-loss (5%), take-profit (10%) and max holding period (9 days).'
|
| 496 |
-
)}
|
| 497 |
</p>
|
| 498 |
<div className="flex flex-wrap gap-3">
|
| 499 |
-
{!
|
| 500 |
<button
|
| 501 |
onClick={() => runAction('start_worker')}
|
| 502 |
disabled={!!actionLoading}
|
| 503 |
className="flex items-center gap-2 px-4 py-2.5 bg-green-600 hover:bg-green-500 text-white rounded-lg text-sm font-medium disabled:opacity-50"
|
| 504 |
>
|
| 505 |
{actionLoading === 'start_worker' ? <RefreshCw className="w-4 h-4 animate-spin" /> : <Play className="w-4 h-4" />}
|
| 506 |
-
|
| 507 |
</button>
|
| 508 |
) : (
|
| 509 |
<button
|
|
@@ -512,40 +346,40 @@ export default function AutoTradingPage() {
|
|
| 512 |
className="flex items-center gap-2 px-4 py-2.5 bg-red-600 hover:bg-red-500 text-white rounded-lg text-sm font-medium disabled:opacity-50"
|
| 513 |
>
|
| 514 |
{actionLoading === 'stop_worker' ? <RefreshCw className="w-4 h-4 animate-spin" /> : <Square className="w-4 h-4" />}
|
| 515 |
-
|
| 516 |
</button>
|
| 517 |
)}
|
| 518 |
<button
|
| 519 |
onClick={() => runAction('run')}
|
| 520 |
-
disabled={!!actionLoading ||
|
| 521 |
className="flex items-center gap-2 px-4 py-2.5 bg-blue-600 hover:bg-blue-500 text-white rounded-lg text-sm font-medium disabled:opacity-50"
|
| 522 |
-
title={
|
| 523 |
>
|
| 524 |
{actionLoading === 'run' ? <RefreshCw className="w-4 h-4 animate-spin" /> : <Zap className="w-4 h-4" />}
|
| 525 |
-
|
| 526 |
</button>
|
| 527 |
<button
|
| 528 |
onClick={() => runAction('run_force')}
|
| 529 |
-
disabled={!!actionLoading ||
|
| 530 |
className="flex items-center gap-2 px-4 py-2.5 bg-yellow-600 hover:bg-yellow-500 text-white rounded-lg text-sm font-medium disabled:opacity-50"
|
| 531 |
-
title={
|
| 532 |
>
|
| 533 |
{actionLoading === 'run_force' ? <RefreshCw className="w-4 h-4 animate-spin" /> : <RotateCcw className="w-4 h-4" />}
|
| 534 |
-
|
| 535 |
</button>
|
| 536 |
</div>
|
| 537 |
-
{
|
| 538 |
-
<p className="text-xs text-gray-400 mt-2">Worker PID: {
|
| 539 |
)}
|
| 540 |
</div>
|
| 541 |
|
| 542 |
{/* Kill Switch Warning */}
|
| 543 |
-
{
|
| 544 |
<div className="bg-red-900/40 border border-red-500/60 rounded-lg p-4 mb-6 flex items-center gap-3">
|
| 545 |
<ShieldOff className="w-6 h-6 text-red-400 flex-shrink-0" />
|
| 546 |
<div>
|
| 547 |
-
<p className="text-red-300 font-medium">
|
| 548 |
-
<p className="text-red-400/70 text-sm">{
|
| 549 |
</div>
|
| 550 |
</div>
|
| 551 |
)}
|
|
@@ -554,139 +388,62 @@ export default function AutoTradingPage() {
|
|
| 554 |
<div className="grid grid-cols-2 md:grid-cols-4 lg:grid-cols-6 gap-3 mb-6">
|
| 555 |
<StatusCard
|
| 556 |
icon={<Activity className="w-5 h-5" />}
|
| 557 |
-
label=
|
| 558 |
-
value={
|
| 559 |
-
color={
|
| 560 |
/>
|
| 561 |
<StatusCard
|
| 562 |
icon={<Clock className="w-5 h-5" />}
|
| 563 |
-
label=
|
| 564 |
-
value={
|
| 565 |
color="text-blue-400"
|
| 566 |
/>
|
| 567 |
<StatusCard
|
| 568 |
icon={<Wallet className="w-5 h-5" />}
|
| 569 |
-
label=
|
| 570 |
-
value={
|
| 571 |
color="text-green-400"
|
| 572 |
/>
|
| 573 |
<StatusCard
|
| 574 |
icon={<DollarSign className="w-5 h-5" />}
|
| 575 |
-
label=
|
| 576 |
-
value={
|
| 577 |
color="text-blue-400"
|
| 578 |
/>
|
| 579 |
<StatusCard
|
| 580 |
-
icon={
|
| 581 |
label="P&L"
|
| 582 |
-
value={fmtPct(
|
| 583 |
-
color={
|
| 584 |
/>
|
| 585 |
<StatusCard
|
| 586 |
icon={<Target className="w-5 h-5" />}
|
| 587 |
-
label=
|
| 588 |
-
value={`${
|
| 589 |
color="text-yellow-400"
|
| 590 |
/>
|
| 591 |
</div>
|
| 592 |
|
| 593 |
-
{
|
| 594 |
-
<div className="bg-white border border-gray-200 rounded-lg p-4 mb-6">
|
| 595 |
-
<div className="flex flex-col lg:flex-row lg:items-center lg:justify-between gap-4">
|
| 596 |
-
<div>
|
| 597 |
-
<div className="text-xs uppercase tracking-wide text-gray-500 mb-1">{t('Son Sinyal', 'Latest Signal')}</div>
|
| 598 |
-
<div className="flex items-center gap-2 flex-wrap">
|
| 599 |
-
<span className="text-lg font-semibold text-gray-900">{latestSignal.symbol}</span>
|
| 600 |
-
<SignalBadge signal={latestSignal.signal} />
|
| 601 |
-
<ActionBadge action={latestSignal.actionTaken} isUS={isUS} />
|
| 602 |
-
</div>
|
| 603 |
-
</div>
|
| 604 |
-
<div className="grid grid-cols-2 md:grid-cols-4 gap-3 text-sm">
|
| 605 |
-
<MiniMetric label="ML" value={latestSignal.mlSignal || '—'} />
|
| 606 |
-
<MiniMetric label={t('Teknik', 'Technical')} value={latestSignal.techSignal || '—'} />
|
| 607 |
-
<MiniMetric label={t('Güven', 'Confidence')} value={`%${(latestSignal.confidence * 100).toFixed(0)}`} />
|
| 608 |
-
<MiniMetric label={t('Tahmin', 'Forecast')} value={`%${(latestSignal.predictedReturn * 100).toFixed(1)}`} />
|
| 609 |
-
</div>
|
| 610 |
-
</div>
|
| 611 |
-
</div>
|
| 612 |
-
)}
|
| 613 |
-
|
| 614 |
-
{marketSummaries.length > 0 && (
|
| 615 |
-
<div className="bg-white border border-gray-200 rounded-lg p-4 mb-6">
|
| 616 |
-
<h3 className="font-semibold text-gray-700 mb-3 flex items-center gap-2">
|
| 617 |
-
<Server className="w-5 h-5 text-indigo-500" />
|
| 618 |
-
{t('Çoklu Market Durumu', 'Multi-Market Status')}
|
| 619 |
-
</h3>
|
| 620 |
-
<div className="grid grid-cols-1 lg:grid-cols-2 gap-3">
|
| 621 |
-
{marketSummaries.map((market) => {
|
| 622 |
-
const summaryStatus = market.status.lastWorkerStatus || (market.status.workerRunning ? 'RUNNING' : 'IDLE')
|
| 623 |
-
const summaryReason = market.status.lastWorkerReason || market.scan.universe || '—'
|
| 624 |
-
return (
|
| 625 |
-
<div key={market.marketId} className="rounded-lg border border-gray-200 bg-gray-50 p-4">
|
| 626 |
-
<div className="flex items-start justify-between gap-3 mb-3">
|
| 627 |
-
<div>
|
| 628 |
-
<h4 className="font-semibold text-gray-900">{market.displayName}</h4>
|
| 629 |
-
<p className="text-xs text-gray-500 uppercase tracking-wide">{market.marketId} · {market.currency}</p>
|
| 630 |
-
</div>
|
| 631 |
-
<span className={`px-2.5 py-1 rounded-full text-xs font-medium ${market.status.workerRunning ? 'bg-green-100 text-green-700' : 'bg-gray-200 text-gray-600'}`}>
|
| 632 |
-
{market.status.workerRunning ? t('Worker Aktif', 'Worker Active') : t('Worker Pasif', 'Worker Inactive')}
|
| 633 |
-
</span>
|
| 634 |
-
</div>
|
| 635 |
-
<div className="grid grid-cols-2 gap-3 text-sm mb-3">
|
| 636 |
-
<MiniMetric label={t('Nakit', 'Cash')} value={fmtMoneyByCurrency(market.portfolio.cash, market.currency, isUS)} />
|
| 637 |
-
<MiniMetric label={t('Pozisyon', 'Positions')} value={`${market.portfolio.positionCount}`} />
|
| 638 |
-
<MiniMetric label="Eligible" value={`${market.eligibleStocks.length}`} />
|
| 639 |
-
<MiniMetric label={t('Tarama', 'Scan')} value={`${market.scan.stage1Count}/${market.scan.stage2Count}`} />
|
| 640 |
-
</div>
|
| 641 |
-
<div className="text-xs text-gray-500 space-y-1">
|
| 642 |
-
<div><span className="font-medium text-gray-700">{t('Durum', 'Status')}:</span> {summaryStatus}</div>
|
| 643 |
-
<div><span className="font-medium text-gray-700">{t('Kaynak', 'Source')}:</span> {summaryReason}</div>
|
| 644 |
-
<div><span className="font-medium text-gray-700">{t('Son Çalışma', 'Last Run')}:</span> {market.status.lastRunDate || '—'}</div>
|
| 645 |
-
</div>
|
| 646 |
-
{market.eligibleStocks.length > 0 && (
|
| 647 |
-
<div className="mt-3 flex flex-wrap gap-2">
|
| 648 |
-
{market.eligibleStocks.slice(0, 10).map((symbol) => (
|
| 649 |
-
<span key={`${market.marketId}-${symbol}`} className="px-2.5 py-1 rounded-full border border-blue-200 bg-blue-50 text-blue-700 text-xs font-medium">
|
| 650 |
-
{symbol}
|
| 651 |
-
</span>
|
| 652 |
-
))}
|
| 653 |
-
{market.eligibleStocks.length > 10 && (
|
| 654 |
-
<span className="px-2.5 py-1 rounded-full border border-gray-300 bg-white text-gray-600 text-xs font-medium">
|
| 655 |
-
+{market.eligibleStocks.length - 10}
|
| 656 |
-
</span>
|
| 657 |
-
)}
|
| 658 |
-
</div>
|
| 659 |
-
)}
|
| 660 |
-
</div>
|
| 661 |
-
)
|
| 662 |
-
})}
|
| 663 |
-
</div>
|
| 664 |
-
</div>
|
| 665 |
-
)}
|
| 666 |
-
|
| 667 |
-
{!selectedStatus.isRunning && (lastStatus || lastReason) && (
|
| 668 |
<div className="bg-white border border-gray-200 rounded-lg px-4 py-3 mb-6 text-sm text-gray-600">
|
| 669 |
-
<span className="font-medium text-gray-700">
|
| 670 |
{lastStatus || '—'}
|
| 671 |
{lastReason ? ` — ${lastReason}` : ''}
|
| 672 |
</div>
|
| 673 |
)}
|
| 674 |
|
| 675 |
{/* Eligible Stocks (Trading Pool) */}
|
| 676 |
-
{
|
| 677 |
<div className="bg-white border border-gray-200 rounded-lg p-4 mb-6">
|
| 678 |
<h3 className="font-semibold text-gray-700 mb-2 flex items-center gap-2">
|
| 679 |
<Target className="w-5 h-5 text-green-500" />
|
| 680 |
-
Trade
|
| 681 |
</h3>
|
| 682 |
<p className="text-sm text-gray-500 mb-3">
|
| 683 |
-
|
| 684 |
-
'ML tarama sonuçlarına göre alım yapılabilecek hisseler. Worker bu havuzdan günlük sinyal üretir ve uygun olanlara pozisyon açar.',
|
| 685 |
-
'Stocks eligible for buying based on ML scan results. Worker generates daily signals from this pool and opens positions on suitable ones.'
|
| 686 |
-
)}
|
| 687 |
</p>
|
| 688 |
<div className="flex flex-wrap gap-2">
|
| 689 |
-
{
|
| 690 |
<span key={sym} className="px-3 py-1.5 bg-green-50 text-green-700 border border-green-200 rounded-full text-sm font-medium">
|
| 691 |
{sym}
|
| 692 |
</span>
|
|
@@ -696,7 +453,7 @@ export default function AutoTradingPage() {
|
|
| 696 |
)}
|
| 697 |
|
| 698 |
{/* Equity Curve (simple text-based) */}
|
| 699 |
-
{
|
| 700 |
<div className="bg-white border border-gray-200 rounded-lg mb-6 overflow-hidden">
|
| 701 |
<button
|
| 702 |
onClick={() => setShowEquity(!showEquity)}
|
|
@@ -704,32 +461,32 @@ export default function AutoTradingPage() {
|
|
| 704 |
>
|
| 705 |
<span className="flex items-center gap-2 font-semibold">
|
| 706 |
<BarChart3 className="w-5 h-5 text-blue-400" />
|
| 707 |
-
|
| 708 |
</span>
|
| 709 |
{showEquity ? <ChevronUp className="w-5 h-5 text-gray-500" /> : <ChevronDown className="w-5 h-5 text-gray-500" />}
|
| 710 |
</button>
|
| 711 |
{showEquity && (
|
| 712 |
<div className="px-4 pb-4">
|
| 713 |
<div className="flex items-end gap-[2px] h-32">
|
| 714 |
-
{
|
| 715 |
-
const min = Math.min(...
|
| 716 |
-
const max = Math.max(...
|
| 717 |
const range = max - min || 1
|
| 718 |
const height = ((pt.equity - min) / range) * 100
|
| 719 |
-
const isLast = i ===
|
| 720 |
return (
|
| 721 |
<div
|
| 722 |
key={pt.date}
|
| 723 |
className={`flex-1 rounded-t ${pt.equity >= 100000 ? 'bg-green-500/70' : 'bg-red-500/70'} ${isLast ? 'ring-1 ring-white/30' : ''}`}
|
| 724 |
style={{ height: `${Math.max(height, 2)}%` }}
|
| 725 |
-
title={`${pt.date}: ${
|
| 726 |
/>
|
| 727 |
)
|
| 728 |
})}
|
| 729 |
</div>
|
| 730 |
<div className="flex justify-between text-xs text-gray-500 mt-1">
|
| 731 |
-
<span>{
|
| 732 |
-
<span>{
|
| 733 |
</div>
|
| 734 |
</div>
|
| 735 |
)}
|
|
@@ -739,10 +496,10 @@ export default function AutoTradingPage() {
|
|
| 739 |
{/* Tab Navigation */}
|
| 740 |
<div className="flex gap-1 mb-4 bg-white p-1 rounded-lg overflow-x-auto">
|
| 741 |
{([
|
| 742 |
-
{ key: 'portfolio' as TabKey, label:
|
| 743 |
-
{ key: 'trades' as TabKey, label: `
|
| 744 |
-
{ key: 'signals' as TabKey, label: `
|
| 745 |
-
{ key: 'performance' as TabKey, label:
|
| 746 |
]).map(t => (
|
| 747 |
<button
|
| 748 |
key={t.key}
|
|
@@ -763,20 +520,19 @@ export default function AutoTradingPage() {
|
|
| 763 |
<div>
|
| 764 |
{/* ─── Position Charts (Live) ──────── */}
|
| 765 |
<div className="px-4 py-3 border-b border-gray-200 font-semibold">
|
| 766 |
-
|
| 767 |
</div>
|
| 768 |
-
{
|
| 769 |
-
<div className="px-4 py-8 text-center text-gray-500">
|
| 770 |
) : (
|
| 771 |
<div className="p-4 space-y-4">
|
| 772 |
-
{
|
| 773 |
<PositionChart
|
| 774 |
key={p.symbol}
|
| 775 |
symbol={p.symbol}
|
| 776 |
entryPrice={p.avgCost}
|
| 777 |
entryDate={p.entryDate}
|
| 778 |
quantity={p.quantity}
|
| 779 |
-
market={market}
|
| 780 |
priceRefreshMs={10_000}
|
| 781 |
chartRefreshMs={60_000}
|
| 782 |
/>
|
|
@@ -785,13 +541,13 @@ export default function AutoTradingPage() {
|
|
| 785 |
)}
|
| 786 |
|
| 787 |
{/* ─── Closed Trades with Charts ───── */}
|
| 788 |
-
{
|
| 789 |
<>
|
| 790 |
<div className="px-4 py-3 border-t border-b border-gray-200 font-semibold">
|
| 791 |
-
|
| 792 |
</div>
|
| 793 |
<div className="p-4 space-y-4">
|
| 794 |
-
{
|
| 795 |
<PositionChart
|
| 796 |
key={`${t.symbol}-${t.entryDate}-${i}`}
|
| 797 |
symbol={t.symbol}
|
|
@@ -801,7 +557,6 @@ export default function AutoTradingPage() {
|
|
| 801 |
exitPrice={t.exitPrice}
|
| 802 |
exitDate={t.exitDate}
|
| 803 |
exitReason={t.exitReason}
|
| 804 |
-
market={market}
|
| 805 |
priceRefreshMs={0}
|
| 806 |
chartRefreshMs={0}
|
| 807 |
/>
|
|
@@ -811,27 +566,27 @@ export default function AutoTradingPage() {
|
|
| 811 |
)}
|
| 812 |
|
| 813 |
{/* Open Trades from signals */}
|
| 814 |
-
{
|
| 815 |
<>
|
| 816 |
-
<div className="px-4 py-3 border-t border-b border-gray-200 font-semibold">
|
| 817 |
<div className="overflow-x-auto">
|
| 818 |
<table className="w-full text-sm">
|
| 819 |
<thead>
|
| 820 |
<tr className="bg-gray-50 text-gray-500 text-xs uppercase">
|
| 821 |
-
<th className="px-4 py-3 text-left">
|
| 822 |
-
<th className="px-4 py-3 text-right">
|
| 823 |
-
<th className="px-4 py-3 text-right">
|
| 824 |
-
<th className="px-4 py-3 text-right">
|
| 825 |
-
<th className="px-4 py-3 text-right">
|
| 826 |
-
<th className="px-4 py-3 text-left">
|
| 827 |
</tr>
|
| 828 |
</thead>
|
| 829 |
<tbody className="divide-y divide-gray-800/50">
|
| 830 |
-
{
|
| 831 |
<tr key={`${t.symbol}-${t.entryDate}`} className="hover:bg-gray-100/30">
|
| 832 |
<td className="px-4 py-3 font-medium text-gray-900">{t.symbol}</td>
|
| 833 |
<td className="px-4 py-3 text-right">{t.quantity}</td>
|
| 834 |
-
<td className="px-4 py-3 text-right">{
|
| 835 |
<td className="px-4 py-3 text-right text-blue-400">
|
| 836 |
%{(t.confidence * 100).toFixed(0)}
|
| 837 |
</td>
|
|
@@ -852,32 +607,32 @@ export default function AutoTradingPage() {
|
|
| 852 |
{activeTab === 'trades' && (
|
| 853 |
<div>
|
| 854 |
<div className="px-4 py-3 border-b border-gray-200 font-semibold">
|
| 855 |
-
|
| 856 |
</div>
|
| 857 |
-
{
|
| 858 |
-
<div className="px-4 py-8 text-center text-gray-500">
|
| 859 |
) : (
|
| 860 |
<div className="overflow-x-auto">
|
| 861 |
<table className="w-full text-sm">
|
| 862 |
<thead>
|
| 863 |
<tr className="bg-gray-50 text-gray-500 text-xs uppercase">
|
| 864 |
-
<th className="px-4 py-3 text-left">
|
| 865 |
-
<th className="px-4 py-3 text-right">
|
| 866 |
-
<th className="px-4 py-3 text-right">
|
| 867 |
<th className="px-4 py-3 text-right">P&L</th>
|
| 868 |
-
<th className="px-4 py-3 text-right">
|
| 869 |
-
<th className="px-4 py-3 text-right">
|
| 870 |
-
<th className="px-4 py-3 text-left">
|
| 871 |
</tr>
|
| 872 |
</thead>
|
| 873 |
<tbody className="divide-y divide-gray-800/50">
|
| 874 |
-
{
|
| 875 |
<tr key={`${t.symbol}-${t.entryDate}`} className="hover:bg-gray-100/30">
|
| 876 |
<td className="px-4 py-3 font-medium text-gray-900">{t.symbol}</td>
|
| 877 |
-
<td className="px-4 py-3 text-right">{
|
| 878 |
-
<td className="px-4 py-3 text-right">{
|
| 879 |
<td className={`px-4 py-3 text-right font-medium ${t.netPnl >= 0 ? 'text-green-400' : 'text-red-400'}`}>
|
| 880 |
-
{
|
| 881 |
</td>
|
| 882 |
<td className={`px-4 py-3 text-right ${t.returnPct >= 0 ? 'text-green-400' : 'text-red-400'}`}>
|
| 883 |
{fmtPct(t.returnPct)}
|
|
@@ -896,26 +651,26 @@ export default function AutoTradingPage() {
|
|
| 896 |
{activeTab === 'signals' && (
|
| 897 |
<div>
|
| 898 |
<div className="px-4 py-3 border-b border-gray-200 font-semibold">
|
| 899 |
-
|
| 900 |
</div>
|
| 901 |
-
{
|
| 902 |
-
<div className="px-4 py-8 text-center text-gray-500">
|
| 903 |
) : (
|
| 904 |
<div className="overflow-x-auto">
|
| 905 |
<table className="w-full text-sm">
|
| 906 |
<thead>
|
| 907 |
<tr className="bg-gray-50 text-gray-500 text-xs uppercase">
|
| 908 |
-
<th className="px-4 py-3 text-left">
|
| 909 |
-
<th className="px-4 py-3 text-left">
|
| 910 |
-
<th className="px-4 py-3 text-center">
|
| 911 |
<th className="px-4 py-3 text-center">ML</th>
|
| 912 |
-
<th className="px-4 py-3 text-center">
|
| 913 |
-
<th className="px-4 py-3 text-right">
|
| 914 |
-
<th className="px-4 py-3 text-left">
|
| 915 |
</tr>
|
| 916 |
</thead>
|
| 917 |
<tbody className="divide-y divide-gray-800/50">
|
| 918 |
-
{
|
| 919 |
<tr key={`${s.symbol}-${s.date}-${i}`} className="hover:bg-gray-100/30">
|
| 920 |
<td className="px-4 py-3 text-gray-500 text-xs">{s.date}</td>
|
| 921 |
<td className="px-4 py-3 font-medium text-gray-900">{s.symbol}</td>
|
|
@@ -931,9 +686,7 @@ export default function AutoTradingPage() {
|
|
| 931 |
<td className="px-4 py-3 text-right text-blue-400">
|
| 932 |
%{(s.confidence * 100).toFixed(0)}
|
| 933 |
</td>
|
| 934 |
-
<td className="px-4 py-3 text-xs text-gray-500">
|
| 935 |
-
<ActionBadge action={s.actionTaken} isUS={isUS} />
|
| 936 |
-
</td>
|
| 937 |
</tr>
|
| 938 |
))}
|
| 939 |
</tbody>
|
|
@@ -947,37 +700,37 @@ export default function AutoTradingPage() {
|
|
| 947 |
<div className="p-4 space-y-6">
|
| 948 |
{/* Stats Grid */}
|
| 949 |
<div className="grid grid-cols-2 md:grid-cols-4 gap-3">
|
| 950 |
-
<PerfCard label=
|
| 951 |
-
<PerfCard label="Win Rate" value={`%${
|
| 952 |
-
<PerfCard label="Profit Factor" value={`${
|
| 953 |
-
<PerfCard label=
|
| 954 |
-
<PerfCard label=
|
| 955 |
-
<PerfCard label=
|
| 956 |
-
<PerfCard label=
|
| 957 |
-
<PerfCard label=
|
| 958 |
</div>
|
| 959 |
|
| 960 |
{/* Symbol Breakdown */}
|
| 961 |
-
{
|
| 962 |
<div>
|
| 963 |
-
<h3 className="font-semibold mb-3 text-gray-600">
|
| 964 |
<div className="overflow-x-auto">
|
| 965 |
<table className="w-full text-sm">
|
| 966 |
<thead>
|
| 967 |
<tr className="bg-gray-50 text-gray-500 text-xs uppercase">
|
| 968 |
-
<th className="px-4 py-3 text-left">
|
| 969 |
<th className="px-4 py-3 text-right">Trade</th>
|
| 970 |
<th className="px-4 py-3 text-right">P&L</th>
|
| 971 |
<th className="px-4 py-3 text-right">Win Rate</th>
|
| 972 |
</tr>
|
| 973 |
</thead>
|
| 974 |
<tbody className="divide-y divide-gray-800/50">
|
| 975 |
-
{
|
| 976 |
<tr key={s.symbol} className="hover:bg-gray-100/30">
|
| 977 |
<td className="px-4 py-3 font-medium text-gray-900">{s.symbol}</td>
|
| 978 |
<td className="px-4 py-3 text-right">{s.trades}</td>
|
| 979 |
<td className={`px-4 py-3 text-right font-medium ${s.pnl >= 0 ? 'text-green-400' : 'text-red-400'}`}>
|
| 980 |
-
{
|
| 981 |
</td>
|
| 982 |
<td className="px-4 py-3 text-right">%{s.winRate}</td>
|
| 983 |
</tr>
|
|
@@ -1020,15 +773,6 @@ function PerfCard({ label, value, color }: { label: string; value: string; color
|
|
| 1020 |
)
|
| 1021 |
}
|
| 1022 |
|
| 1023 |
-
function MiniMetric({ label, value }: { label: string; value: string }) {
|
| 1024 |
-
return (
|
| 1025 |
-
<div className="rounded-lg border border-gray-200 bg-white px-3 py-2">
|
| 1026 |
-
<div className="text-[11px] uppercase tracking-wide text-gray-500">{label}</div>
|
| 1027 |
-
<div className="text-sm font-semibold text-gray-900">{value}</div>
|
| 1028 |
-
</div>
|
| 1029 |
-
)
|
| 1030 |
-
}
|
| 1031 |
-
|
| 1032 |
function SignalBadge({ signal }: { signal: string }) {
|
| 1033 |
const s = signal?.toUpperCase()
|
| 1034 |
if (s === 'BUY') return <span className="text-xs px-2 py-0.5 rounded-full bg-green-600/30 text-green-400">BUY</span>
|
|
@@ -1036,24 +780,3 @@ function SignalBadge({ signal }: { signal: string }) {
|
|
| 1036 |
if (s === 'HOLD') return <span className="text-xs px-2 py-0.5 rounded-full bg-gray-600/30 text-gray-500">HOLD</span>
|
| 1037 |
return <span className="text-xs text-gray-500">{signal || '—'}</span>
|
| 1038 |
}
|
| 1039 |
-
|
| 1040 |
-
function ActionBadge({ action, isUS = false }: { action: string; isUS?: boolean }) {
|
| 1041 |
-
const normalized = (action || '').toUpperCase()
|
| 1042 |
-
|
| 1043 |
-
let className = 'bg-gray-100 text-gray-600 border border-gray-200'
|
| 1044 |
-
if (normalized.startsWith('BUY_EXECUTED') || normalized.startsWith('SELL_EXECUTED')) {
|
| 1045 |
-
className = 'bg-green-50 text-green-700 border border-green-200'
|
| 1046 |
-
} else if (normalized.includes('BLOCKED') || normalized.includes('REJECTED')) {
|
| 1047 |
-
className = 'bg-red-50 text-red-700 border border-red-200'
|
| 1048 |
-
} else if (normalized.startsWith('BUY_SIGNAL') || normalized.startsWith('SELL_SIGNAL')) {
|
| 1049 |
-
className = 'bg-blue-50 text-blue-700 border border-blue-200'
|
| 1050 |
-
} else if (normalized.startsWith('BUY_SKIPPED') || normalized === 'SELL_NO_POSITION' || normalized === 'PENDING') {
|
| 1051 |
-
className = 'bg-amber-50 text-amber-700 border border-amber-200'
|
| 1052 |
-
}
|
| 1053 |
-
|
| 1054 |
-
return (
|
| 1055 |
-
<span className={`inline-flex items-center rounded-full px-2.5 py-1 text-[11px] font-medium ${className}`}>
|
| 1056 |
-
{describeSignalAction(action, isUS)}
|
| 1057 |
-
</span>
|
| 1058 |
-
)
|
| 1059 |
-
}
|
|
|
|
| 2 |
|
| 3 |
import { useState, useEffect, useCallback } from 'react'
|
| 4 |
import { fetchJson } from '@/lib/http'
|
|
|
|
| 5 |
import PositionChart from '@/components/PositionChart'
|
| 6 |
import {
|
| 7 |
Play,
|
|
|
|
| 119 |
actionTaken: string
|
| 120 |
}
|
| 121 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 122 |
interface TradingData {
|
| 123 |
status: TradingStatus
|
| 124 |
portfolio: Portfolio
|
|
|
|
| 128 |
performance: Performance
|
| 129 |
signals: Signal[]
|
| 130 |
eligibleStocks: string[]
|
|
|
|
|
|
|
| 131 |
timestamp: string
|
| 132 |
error?: string
|
| 133 |
}
|
| 134 |
|
| 135 |
// ─── Formatting Helpers ────────────────────
|
| 136 |
+
function fmtMoney(n: number) {
|
| 137 |
+
return new Intl.NumberFormat('tr-TR', { style: 'currency', currency: 'TRY' }).format(n)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 138 |
}
|
|
|
|
| 139 |
function fmtPct(n: number) {
|
| 140 |
return `%${n >= 0 ? '+' : ''}${n.toFixed(2)}`
|
| 141 |
}
|
| 142 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 143 |
// ─── Tab Type ──────────────────────────────
|
| 144 |
type TabKey = 'portfolio' | 'trades' | 'signals' | 'performance'
|
| 145 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 146 |
export default function AutoTradingPage() {
|
|
|
|
|
|
|
|
|
|
|
|
|
| 147 |
const [data, setData] = useState<TradingData | null>(null)
|
| 148 |
const [loading, setLoading] = useState(true)
|
| 149 |
const [error, setError] = useState<string | null>(null)
|
|
|
|
| 151 |
const [actionMessage, setActionMessage] = useState<string | null>(null)
|
| 152 |
const [activeTab, setActiveTab] = useState<TabKey>('portfolio')
|
| 153 |
const [showEquity, setShowEquity] = useState(true)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 154 |
|
| 155 |
const loadData = useCallback(async () => {
|
| 156 |
setLoading(true)
|
|
|
|
| 159 |
const res = await fetchJson<TradingData>('/api/trading', undefined, { timeoutMs: 20000 })
|
| 160 |
if (res.error) throw new Error(res.error)
|
| 161 |
setData(res)
|
|
|
|
|
|
|
|
|
|
| 162 |
} catch (e: unknown) {
|
| 163 |
+
setError(e instanceof Error ? e.message : 'Veriler alınamadı')
|
| 164 |
} finally {
|
| 165 |
setLoading(false)
|
| 166 |
}
|
| 167 |
+
}, [])
|
| 168 |
|
| 169 |
useEffect(() => {
|
| 170 |
loadData()
|
|
|
|
| 183 |
}>('/api/trading', {
|
| 184 |
method: 'POST',
|
| 185 |
headers: { 'Content-Type': 'application/json' },
|
| 186 |
+
body: JSON.stringify({ action }),
|
| 187 |
}, { timeoutMs: 130000 })
|
| 188 |
|
| 189 |
// Handle 403 gracefully (run/reset moved to worker)
|
|
|
|
| 194 |
if (res.error) throw new Error(res.error)
|
| 195 |
|
| 196 |
if (action === 'kill') {
|
| 197 |
+
setActionMessage('Kill Switch aktif edildi — worker yeni trade açmayacak')
|
| 198 |
} else if (action === 'unkill') {
|
| 199 |
+
setActionMessage('Kill Switch kaldırıldı — worker normal çalışmaya devam edecek')
|
| 200 |
} else if (action === 'start_worker') {
|
| 201 |
+
setActionMessage('Worker başlatıldı — BIST saatlerinde otomatik trade yapacak')
|
| 202 |
} else if (action === 'stop_worker') {
|
| 203 |
+
setActionMessage('Worker durduruldu')
|
| 204 |
} else if (action === 'run') {
|
| 205 |
+
setActionMessage('Trading döngüsü tamamlandı — veriler güncellendi')
|
| 206 |
} else if (action === 'run_force') {
|
| 207 |
+
setActionMessage('Zorla trading döngüsü tamamlandı — veriler güncellendi')
|
| 208 |
} else {
|
| 209 |
+
setActionMessage('İşlem tamamlandı')
|
| 210 |
}
|
| 211 |
|
| 212 |
// Reload data after action
|
| 213 |
await loadData()
|
| 214 |
} catch (e: unknown) {
|
| 215 |
+
const msg = e instanceof Error ? e.message : 'İşlem başarısız'
|
| 216 |
+
setActionMessage(`Hata: ${msg}`)
|
| 217 |
} finally {
|
| 218 |
setActionLoading(null)
|
| 219 |
}
|
|
|
|
| 225 |
<div className="min-h-screen bg-gray-50 flex items-center justify-center">
|
| 226 |
<div className="flex flex-col items-center gap-3">
|
| 227 |
<RefreshCw className="w-8 h-8 text-blue-400 animate-spin" />
|
| 228 |
+
<p className="text-gray-500">Trading verileri yükleniyor...</p>
|
| 229 |
</div>
|
| 230 |
</div>
|
| 231 |
)
|
|
|
|
| 241 |
onClick={loadData}
|
| 242 |
className="px-4 py-2 bg-red-600 hover:bg-red-500 text-white rounded-lg text-sm"
|
| 243 |
>
|
| 244 |
+
Tekrar Dene
|
| 245 |
</button>
|
| 246 |
</div>
|
| 247 |
</div>
|
|
|
|
| 250 |
|
| 251 |
if (!data) return null
|
| 252 |
|
| 253 |
+
const { status, portfolio, performance, openTrades, closedTrades, signals, equityCurve, eligibleStocks } = data
|
| 254 |
+
const lastStatus = status.lastWorkerStatus || status.lastResult?.status || null
|
| 255 |
+
const lastReason = status.lastWorkerReason || status.lastResult?.reason || null
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 256 |
|
| 257 |
// ─── Main Render ────────────────────────
|
| 258 |
return (
|
|
|
|
| 262 |
<div>
|
| 263 |
<h1 className="text-2xl font-bold flex items-center gap-2">
|
| 264 |
<Zap className="w-7 h-7 text-yellow-400" />
|
| 265 |
+
Otomatik Trading
|
| 266 |
</h1>
|
| 267 |
<p className="text-gray-500 text-sm mt-1">
|
| 268 |
+
ML destekli paper trading sistemi
|
| 269 |
</p>
|
| 270 |
</div>
|
| 271 |
<div className="flex items-center gap-2 flex-wrap">
|
| 272 |
{/* Worker Status Indicator */}
|
| 273 |
<div className="flex items-center gap-2 px-4 py-2 bg-gray-100 rounded-lg text-sm">
|
| 274 |
+
<Server className={`w-4 h-4 ${status.workerRunning ? 'text-green-500' : 'text-gray-400'}`} />
|
| 275 |
+
<span className={status.workerRunning ? 'text-green-600 font-medium' : 'text-gray-500'}>
|
| 276 |
+
{status.workerRunning ? 'Worker Aktif' : 'Worker Bekliyor'}
|
| 277 |
</span>
|
| 278 |
</div>
|
| 279 |
|
| 280 |
{/* Kill Switch */}
|
| 281 |
+
{status.killSwitchActive ? (
|
| 282 |
<button
|
| 283 |
onClick={() => runAction('unkill')}
|
| 284 |
disabled={!!actionLoading}
|
| 285 |
className="flex items-center gap-2 px-4 py-2 bg-yellow-600 hover:bg-yellow-500 text-white rounded-lg text-sm font-medium disabled:opacity-50"
|
| 286 |
>
|
| 287 |
<Shield className="w-4 h-4" />
|
| 288 |
+
Kill Switch Kaldır
|
| 289 |
</button>
|
| 290 |
+
) : (
|
| 291 |
<button
|
| 292 |
onClick={() => runAction('kill')}
|
| 293 |
disabled={!!actionLoading}
|
|
|
|
| 296 |
<Square className="w-4 h-4" />
|
| 297 |
Kill Switch
|
| 298 |
</button>
|
| 299 |
+
)}
|
| 300 |
|
| 301 |
{/* Refresh */}
|
| 302 |
<button
|
|
|
|
| 308 |
</div>
|
| 309 |
</div>
|
| 310 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 311 |
{actionMessage && (
|
| 312 |
<div className={`border rounded-lg px-4 py-3 mb-6 text-sm ${
|
| 313 |
+
actionMessage.startsWith('Hata:') || actionMessage.includes('moved to')
|
| 314 |
? 'bg-amber-900/30 border-amber-600/50 text-amber-200'
|
| 315 |
: 'bg-white border-gray-200 text-gray-700'
|
| 316 |
}`}>
|
|
|
|
| 323 |
<div className="bg-white border border-gray-200 rounded-lg p-4 mb-6">
|
| 324 |
<h3 className="font-semibold text-gray-700 mb-2 flex items-center gap-2">
|
| 325 |
<Server className="w-5 h-5 text-blue-500" />
|
| 326 |
+
Worker Yönetimi
|
| 327 |
</h3>
|
| 328 |
<p className="text-sm text-gray-500 mb-4">
|
| 329 |
+
Worker, BIST saatlerinde (10:00-18:00) eligible hisseler için ML sinyalleri üretir ve otomatik paper trade yapar.
|
| 330 |
+
Günde bir kez çalışır. Stop-loss (%5), take-profit (%10) ve max tutma süresi (9 gün) ile pozisyonları yönetir.
|
|
|
|
|
|
|
| 331 |
</p>
|
| 332 |
<div className="flex flex-wrap gap-3">
|
| 333 |
+
{!status.workerRunning ? (
|
| 334 |
<button
|
| 335 |
onClick={() => runAction('start_worker')}
|
| 336 |
disabled={!!actionLoading}
|
| 337 |
className="flex items-center gap-2 px-4 py-2.5 bg-green-600 hover:bg-green-500 text-white rounded-lg text-sm font-medium disabled:opacity-50"
|
| 338 |
>
|
| 339 |
{actionLoading === 'start_worker' ? <RefreshCw className="w-4 h-4 animate-spin" /> : <Play className="w-4 h-4" />}
|
| 340 |
+
Worker Başlat (Daemon)
|
| 341 |
</button>
|
| 342 |
) : (
|
| 343 |
<button
|
|
|
|
| 346 |
className="flex items-center gap-2 px-4 py-2.5 bg-red-600 hover:bg-red-500 text-white rounded-lg text-sm font-medium disabled:opacity-50"
|
| 347 |
>
|
| 348 |
{actionLoading === 'stop_worker' ? <RefreshCw className="w-4 h-4 animate-spin" /> : <Square className="w-4 h-4" />}
|
| 349 |
+
Worker Durdur
|
| 350 |
</button>
|
| 351 |
)}
|
| 352 |
<button
|
| 353 |
onClick={() => runAction('run')}
|
| 354 |
+
disabled={!!actionLoading || status.workerRunning}
|
| 355 |
className="flex items-center gap-2 px-4 py-2.5 bg-blue-600 hover:bg-blue-500 text-white rounded-lg text-sm font-medium disabled:opacity-50"
|
| 356 |
+
title={status.workerRunning ? 'Worker çalışırken tek döngü çalıştırılamaz' : 'Bir kez çalıştırıp durur (~10 sn)'}
|
| 357 |
>
|
| 358 |
{actionLoading === 'run' ? <RefreshCw className="w-4 h-4 animate-spin" /> : <Zap className="w-4 h-4" />}
|
| 359 |
+
Tek Döngü Çalıştır
|
| 360 |
</button>
|
| 361 |
<button
|
| 362 |
onClick={() => runAction('run_force')}
|
| 363 |
+
disabled={!!actionLoading || status.workerRunning}
|
| 364 |
className="flex items-center gap-2 px-4 py-2.5 bg-yellow-600 hover:bg-yellow-500 text-white rounded-lg text-sm font-medium disabled:opacity-50"
|
| 365 |
+
title={status.workerRunning ? 'Worker çalışırken tek döngü çalıştırılamaz' : 'Aynı gün içinde tekrar çalıştırır (SKIPPED bypass)'}
|
| 366 |
>
|
| 367 |
{actionLoading === 'run_force' ? <RefreshCw className="w-4 h-4 animate-spin" /> : <RotateCcw className="w-4 h-4" />}
|
| 368 |
+
Zorla Tek Döngü
|
| 369 |
</button>
|
| 370 |
</div>
|
| 371 |
+
{status.workerRunning && status.workerPid && (
|
| 372 |
+
<p className="text-xs text-gray-400 mt-2">Worker PID: {status.workerPid}</p>
|
| 373 |
)}
|
| 374 |
</div>
|
| 375 |
|
| 376 |
{/* Kill Switch Warning */}
|
| 377 |
+
{status.killSwitchActive && (
|
| 378 |
<div className="bg-red-900/40 border border-red-500/60 rounded-lg p-4 mb-6 flex items-center gap-3">
|
| 379 |
<ShieldOff className="w-6 h-6 text-red-400 flex-shrink-0" />
|
| 380 |
<div>
|
| 381 |
+
<p className="text-red-300 font-medium">Kill Switch Aktif</p>
|
| 382 |
+
<p className="text-red-400/70 text-sm">{status.killSwitchReason || 'Manuel olarak durduruldu'}</p>
|
| 383 |
</div>
|
| 384 |
</div>
|
| 385 |
)}
|
|
|
|
| 388 |
<div className="grid grid-cols-2 md:grid-cols-4 lg:grid-cols-6 gap-3 mb-6">
|
| 389 |
<StatusCard
|
| 390 |
icon={<Activity className="w-5 h-5" />}
|
| 391 |
+
label="Durum"
|
| 392 |
+
value={status.isRunning ? 'Çalışıyor' : (lastStatus ? `Bekliyor — ${lastStatus}` : 'Bekliyor')}
|
| 393 |
+
color={status.isRunning ? 'text-green-400' : 'text-gray-500'}
|
| 394 |
/>
|
| 395 |
<StatusCard
|
| 396 |
icon={<Clock className="w-5 h-5" />}
|
| 397 |
+
label="Son Çalışma"
|
| 398 |
+
value={status.lastRunDate || '—'}
|
| 399 |
color="text-blue-400"
|
| 400 |
/>
|
| 401 |
<StatusCard
|
| 402 |
icon={<Wallet className="w-5 h-5" />}
|
| 403 |
+
label="Nakit"
|
| 404 |
+
value={fmtMoney(portfolio.cash)}
|
| 405 |
color="text-green-400"
|
| 406 |
/>
|
| 407 |
<StatusCard
|
| 408 |
icon={<DollarSign className="w-5 h-5" />}
|
| 409 |
+
label="Toplam Özkaynak"
|
| 410 |
+
value={fmtMoney(portfolio.equity)}
|
| 411 |
color="text-blue-400"
|
| 412 |
/>
|
| 413 |
<StatusCard
|
| 414 |
+
icon={portfolio.pnlPct >= 0 ? <TrendingUp className="w-5 h-5" /> : <TrendingDown className="w-5 h-5" />}
|
| 415 |
label="P&L"
|
| 416 |
+
value={fmtPct(portfolio.pnlPct)}
|
| 417 |
+
color={portfolio.pnlPct >= 0 ? 'text-green-400' : 'text-red-400'}
|
| 418 |
/>
|
| 419 |
<StatusCard
|
| 420 |
icon={<Target className="w-5 h-5" />}
|
| 421 |
+
label="Pozisyon"
|
| 422 |
+
value={`${portfolio.positionCount} açık`}
|
| 423 |
color="text-yellow-400"
|
| 424 |
/>
|
| 425 |
</div>
|
| 426 |
|
| 427 |
+
{!status.isRunning && (lastStatus || lastReason) && (
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 428 |
<div className="bg-white border border-gray-200 rounded-lg px-4 py-3 mb-6 text-sm text-gray-600">
|
| 429 |
+
<span className="font-medium text-gray-700">Son döngü:</span>{' '}
|
| 430 |
{lastStatus || '—'}
|
| 431 |
{lastReason ? ` — ${lastReason}` : ''}
|
| 432 |
</div>
|
| 433 |
)}
|
| 434 |
|
| 435 |
{/* Eligible Stocks (Trading Pool) */}
|
| 436 |
+
{eligibleStocks && eligibleStocks.length > 0 && (
|
| 437 |
<div className="bg-white border border-gray-200 rounded-lg p-4 mb-6">
|
| 438 |
<h3 className="font-semibold text-gray-700 mb-2 flex items-center gap-2">
|
| 439 |
<Target className="w-5 h-5 text-green-500" />
|
| 440 |
+
Trade Havuzu ({eligibleStocks.length} hisse)
|
| 441 |
</h3>
|
| 442 |
<p className="text-sm text-gray-500 mb-3">
|
| 443 |
+
ML tarama sonuçlarına göre alım yapılabilecek hisseler. Worker bu havuzdan günlük sinyal üretir ve uygun olanlara pozisyon açar.
|
|
|
|
|
|
|
|
|
|
| 444 |
</p>
|
| 445 |
<div className="flex flex-wrap gap-2">
|
| 446 |
+
{eligibleStocks.map((sym: string) => (
|
| 447 |
<span key={sym} className="px-3 py-1.5 bg-green-50 text-green-700 border border-green-200 rounded-full text-sm font-medium">
|
| 448 |
{sym}
|
| 449 |
</span>
|
|
|
|
| 453 |
)}
|
| 454 |
|
| 455 |
{/* Equity Curve (simple text-based) */}
|
| 456 |
+
{equityCurve.length > 0 && (
|
| 457 |
<div className="bg-white border border-gray-200 rounded-lg mb-6 overflow-hidden">
|
| 458 |
<button
|
| 459 |
onClick={() => setShowEquity(!showEquity)}
|
|
|
|
| 461 |
>
|
| 462 |
<span className="flex items-center gap-2 font-semibold">
|
| 463 |
<BarChart3 className="w-5 h-5 text-blue-400" />
|
| 464 |
+
Özkaynak Eğrisi ({equityCurve.length} gün)
|
| 465 |
</span>
|
| 466 |
{showEquity ? <ChevronUp className="w-5 h-5 text-gray-500" /> : <ChevronDown className="w-5 h-5 text-gray-500" />}
|
| 467 |
</button>
|
| 468 |
{showEquity && (
|
| 469 |
<div className="px-4 pb-4">
|
| 470 |
<div className="flex items-end gap-[2px] h-32">
|
| 471 |
+
{equityCurve.slice(-60).map((pt, i) => {
|
| 472 |
+
const min = Math.min(...equityCurve.slice(-60).map(e => e.equity))
|
| 473 |
+
const max = Math.max(...equityCurve.slice(-60).map(e => e.equity))
|
| 474 |
const range = max - min || 1
|
| 475 |
const height = ((pt.equity - min) / range) * 100
|
| 476 |
+
const isLast = i === equityCurve.slice(-60).length - 1
|
| 477 |
return (
|
| 478 |
<div
|
| 479 |
key={pt.date}
|
| 480 |
className={`flex-1 rounded-t ${pt.equity >= 100000 ? 'bg-green-500/70' : 'bg-red-500/70'} ${isLast ? 'ring-1 ring-white/30' : ''}`}
|
| 481 |
style={{ height: `${Math.max(height, 2)}%` }}
|
| 482 |
+
title={`${pt.date}: ${fmtMoney(pt.equity)}`}
|
| 483 |
/>
|
| 484 |
)
|
| 485 |
})}
|
| 486 |
</div>
|
| 487 |
<div className="flex justify-between text-xs text-gray-500 mt-1">
|
| 488 |
+
<span>{equityCurve.slice(-60)[0]?.date}</span>
|
| 489 |
+
<span>{equityCurve[equityCurve.length - 1]?.date}</span>
|
| 490 |
</div>
|
| 491 |
</div>
|
| 492 |
)}
|
|
|
|
| 496 |
{/* Tab Navigation */}
|
| 497 |
<div className="flex gap-1 mb-4 bg-white p-1 rounded-lg overflow-x-auto">
|
| 498 |
{([
|
| 499 |
+
{ key: 'portfolio' as TabKey, label: 'Portföy', icon: <Wallet className="w-4 h-4" /> },
|
| 500 |
+
{ key: 'trades' as TabKey, label: `Trade\'ler (${closedTrades.length})`, icon: <BarChart3 className="w-4 h-4" /> },
|
| 501 |
+
{ key: 'signals' as TabKey, label: `Sinyaller (${signals.length})`, icon: <Target className="w-4 h-4" /> },
|
| 502 |
+
{ key: 'performance' as TabKey, label: 'Performans', icon: <TrendingUp className="w-4 h-4" /> },
|
| 503 |
]).map(t => (
|
| 504 |
<button
|
| 505 |
key={t.key}
|
|
|
|
| 520 |
<div>
|
| 521 |
{/* ─── Position Charts (Live) ──────── */}
|
| 522 |
<div className="px-4 py-3 border-b border-gray-200 font-semibold">
|
| 523 |
+
Açık Pozisyonlar — Canlı Grafik
|
| 524 |
</div>
|
| 525 |
+
{portfolio.positions.length === 0 ? (
|
| 526 |
+
<div className="px-4 py-8 text-center text-gray-500">Açık pozisyon yok</div>
|
| 527 |
) : (
|
| 528 |
<div className="p-4 space-y-4">
|
| 529 |
+
{portfolio.positions.map(p => (
|
| 530 |
<PositionChart
|
| 531 |
key={p.symbol}
|
| 532 |
symbol={p.symbol}
|
| 533 |
entryPrice={p.avgCost}
|
| 534 |
entryDate={p.entryDate}
|
| 535 |
quantity={p.quantity}
|
|
|
|
| 536 |
priceRefreshMs={10_000}
|
| 537 |
chartRefreshMs={60_000}
|
| 538 |
/>
|
|
|
|
| 541 |
)}
|
| 542 |
|
| 543 |
{/* ─── Closed Trades with Charts ───── */}
|
| 544 |
+
{closedTrades.length > 0 && (
|
| 545 |
<>
|
| 546 |
<div className="px-4 py-3 border-t border-b border-gray-200 font-semibold">
|
| 547 |
+
Kapatılmış Pozisyonlar — Grafik
|
| 548 |
</div>
|
| 549 |
<div className="p-4 space-y-4">
|
| 550 |
+
{closedTrades.slice(0, 5).map((t, i) => (
|
| 551 |
<PositionChart
|
| 552 |
key={`${t.symbol}-${t.entryDate}-${i}`}
|
| 553 |
symbol={t.symbol}
|
|
|
|
| 557 |
exitPrice={t.exitPrice}
|
| 558 |
exitDate={t.exitDate}
|
| 559 |
exitReason={t.exitReason}
|
|
|
|
| 560 |
priceRefreshMs={0}
|
| 561 |
chartRefreshMs={0}
|
| 562 |
/>
|
|
|
|
| 566 |
)}
|
| 567 |
|
| 568 |
{/* Open Trades from signals */}
|
| 569 |
+
{openTrades.length > 0 && (
|
| 570 |
<>
|
| 571 |
+
<div className="px-4 py-3 border-t border-b border-gray-200 font-semibold">Açık Trade'ler</div>
|
| 572 |
<div className="overflow-x-auto">
|
| 573 |
<table className="w-full text-sm">
|
| 574 |
<thead>
|
| 575 |
<tr className="bg-gray-50 text-gray-500 text-xs uppercase">
|
| 576 |
+
<th className="px-4 py-3 text-left">Sembol</th>
|
| 577 |
+
<th className="px-4 py-3 text-right">Adet</th>
|
| 578 |
+
<th className="px-4 py-3 text-right">Giriş Fiyat</th>
|
| 579 |
+
<th className="px-4 py-3 text-right">Güven</th>
|
| 580 |
+
<th className="px-4 py-3 text-right">Tahmin Getiri</th>
|
| 581 |
+
<th className="px-4 py-3 text-left">Tarih</th>
|
| 582 |
</tr>
|
| 583 |
</thead>
|
| 584 |
<tbody className="divide-y divide-gray-800/50">
|
| 585 |
+
{openTrades.map((t, i) => (
|
| 586 |
<tr key={`${t.symbol}-${t.entryDate}`} className="hover:bg-gray-100/30">
|
| 587 |
<td className="px-4 py-3 font-medium text-gray-900">{t.symbol}</td>
|
| 588 |
<td className="px-4 py-3 text-right">{t.quantity}</td>
|
| 589 |
+
<td className="px-4 py-3 text-right">{fmtMoney(t.entryPrice)}</td>
|
| 590 |
<td className="px-4 py-3 text-right text-blue-400">
|
| 591 |
%{(t.confidence * 100).toFixed(0)}
|
| 592 |
</td>
|
|
|
|
| 607 |
{activeTab === 'trades' && (
|
| 608 |
<div>
|
| 609 |
<div className="px-4 py-3 border-b border-gray-200 font-semibold">
|
| 610 |
+
Kapatılmış Trade'ler ({closedTrades.length})
|
| 611 |
</div>
|
| 612 |
+
{closedTrades.length === 0 ? (
|
| 613 |
+
<div className="px-4 py-8 text-center text-gray-500">Henüz kapatılmış trade yok</div>
|
| 614 |
) : (
|
| 615 |
<div className="overflow-x-auto">
|
| 616 |
<table className="w-full text-sm">
|
| 617 |
<thead>
|
| 618 |
<tr className="bg-gray-50 text-gray-500 text-xs uppercase">
|
| 619 |
+
<th className="px-4 py-3 text-left">Sembol</th>
|
| 620 |
+
<th className="px-4 py-3 text-right">Giriş</th>
|
| 621 |
+
<th className="px-4 py-3 text-right">Çıkış</th>
|
| 622 |
<th className="px-4 py-3 text-right">P&L</th>
|
| 623 |
+
<th className="px-4 py-3 text-right">Getiri</th>
|
| 624 |
+
<th className="px-4 py-3 text-right">Gün</th>
|
| 625 |
+
<th className="px-4 py-3 text-left">Neden</th>
|
| 626 |
</tr>
|
| 627 |
</thead>
|
| 628 |
<tbody className="divide-y divide-gray-800/50">
|
| 629 |
+
{closedTrades.map((t, i) => (
|
| 630 |
<tr key={`${t.symbol}-${t.entryDate}`} className="hover:bg-gray-100/30">
|
| 631 |
<td className="px-4 py-3 font-medium text-gray-900">{t.symbol}</td>
|
| 632 |
+
<td className="px-4 py-3 text-right">{fmtMoney(t.entryPrice)}</td>
|
| 633 |
+
<td className="px-4 py-3 text-right">{fmtMoney(t.exitPrice)}</td>
|
| 634 |
<td className={`px-4 py-3 text-right font-medium ${t.netPnl >= 0 ? 'text-green-400' : 'text-red-400'}`}>
|
| 635 |
+
{fmtMoney(t.netPnl)}
|
| 636 |
</td>
|
| 637 |
<td className={`px-4 py-3 text-right ${t.returnPct >= 0 ? 'text-green-400' : 'text-red-400'}`}>
|
| 638 |
{fmtPct(t.returnPct)}
|
|
|
|
| 651 |
{activeTab === 'signals' && (
|
| 652 |
<div>
|
| 653 |
<div className="px-4 py-3 border-b border-gray-200 font-semibold">
|
| 654 |
+
Son Sinyaller ({signals.length})
|
| 655 |
</div>
|
| 656 |
+
{signals.length === 0 ? (
|
| 657 |
+
<div className="px-4 py-8 text-center text-gray-500">Sinyal kaydı yok</div>
|
| 658 |
) : (
|
| 659 |
<div className="overflow-x-auto">
|
| 660 |
<table className="w-full text-sm">
|
| 661 |
<thead>
|
| 662 |
<tr className="bg-gray-50 text-gray-500 text-xs uppercase">
|
| 663 |
+
<th className="px-4 py-3 text-left">Tarih</th>
|
| 664 |
+
<th className="px-4 py-3 text-left">Sembol</th>
|
| 665 |
+
<th className="px-4 py-3 text-center">Sinyal</th>
|
| 666 |
<th className="px-4 py-3 text-center">ML</th>
|
| 667 |
+
<th className="px-4 py-3 text-center">Teknik</th>
|
| 668 |
+
<th className="px-4 py-3 text-right">Güven</th>
|
| 669 |
+
<th className="px-4 py-3 text-left">Eylem</th>
|
| 670 |
</tr>
|
| 671 |
</thead>
|
| 672 |
<tbody className="divide-y divide-gray-800/50">
|
| 673 |
+
{signals.map((s, i) => (
|
| 674 |
<tr key={`${s.symbol}-${s.date}-${i}`} className="hover:bg-gray-100/30">
|
| 675 |
<td className="px-4 py-3 text-gray-500 text-xs">{s.date}</td>
|
| 676 |
<td className="px-4 py-3 font-medium text-gray-900">{s.symbol}</td>
|
|
|
|
| 686 |
<td className="px-4 py-3 text-right text-blue-400">
|
| 687 |
%{(s.confidence * 100).toFixed(0)}
|
| 688 |
</td>
|
| 689 |
+
<td className="px-4 py-3 text-xs text-gray-500">{s.actionTaken}</td>
|
|
|
|
|
|
|
| 690 |
</tr>
|
| 691 |
))}
|
| 692 |
</tbody>
|
|
|
|
| 700 |
<div className="p-4 space-y-6">
|
| 701 |
{/* Stats Grid */}
|
| 702 |
<div className="grid grid-cols-2 md:grid-cols-4 gap-3">
|
| 703 |
+
<PerfCard label="Toplam P&L" value={fmtMoney(performance.totalPnl)} color={performance.totalPnl >= 0 ? 'text-green-400' : 'text-red-400'} />
|
| 704 |
+
<PerfCard label="Win Rate" value={`%${performance.winRate}`} color={performance.winRate >= 50 ? 'text-green-400' : 'text-red-400'} />
|
| 705 |
+
<PerfCard label="Profit Factor" value={`${performance.profitFactor}`} color="text-blue-400" />
|
| 706 |
+
<PerfCard label="Trade Sayısı" value={`${performance.closedTradesCount}`} color="text-gray-600" />
|
| 707 |
+
<PerfCard label="Ort. Kazanç" value={fmtMoney(performance.avgWin)} color="text-green-400" />
|
| 708 |
+
<PerfCard label="Ort. Kayıp" value={fmtMoney(performance.avgLoss)} color="text-red-400" />
|
| 709 |
+
<PerfCard label="En İyi Trade" value={fmtPct(performance.bestTrade)} color="text-green-400" />
|
| 710 |
+
<PerfCard label="En Kötü Trade" value={fmtPct(performance.worstTrade)} color="text-red-400" />
|
| 711 |
</div>
|
| 712 |
|
| 713 |
{/* Symbol Breakdown */}
|
| 714 |
+
{performance.symbolBreakdown.length > 0 && (
|
| 715 |
<div>
|
| 716 |
+
<h3 className="font-semibold mb-3 text-gray-600">Sembol Bazlı Performans</h3>
|
| 717 |
<div className="overflow-x-auto">
|
| 718 |
<table className="w-full text-sm">
|
| 719 |
<thead>
|
| 720 |
<tr className="bg-gray-50 text-gray-500 text-xs uppercase">
|
| 721 |
+
<th className="px-4 py-3 text-left">Sembol</th>
|
| 722 |
<th className="px-4 py-3 text-right">Trade</th>
|
| 723 |
<th className="px-4 py-3 text-right">P&L</th>
|
| 724 |
<th className="px-4 py-3 text-right">Win Rate</th>
|
| 725 |
</tr>
|
| 726 |
</thead>
|
| 727 |
<tbody className="divide-y divide-gray-800/50">
|
| 728 |
+
{performance.symbolBreakdown.map(s => (
|
| 729 |
<tr key={s.symbol} className="hover:bg-gray-100/30">
|
| 730 |
<td className="px-4 py-3 font-medium text-gray-900">{s.symbol}</td>
|
| 731 |
<td className="px-4 py-3 text-right">{s.trades}</td>
|
| 732 |
<td className={`px-4 py-3 text-right font-medium ${s.pnl >= 0 ? 'text-green-400' : 'text-red-400'}`}>
|
| 733 |
+
{fmtMoney(s.pnl)}
|
| 734 |
</td>
|
| 735 |
<td className="px-4 py-3 text-right">%{s.winRate}</td>
|
| 736 |
</tr>
|
|
|
|
| 773 |
)
|
| 774 |
}
|
| 775 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 776 |
function SignalBadge({ signal }: { signal: string }) {
|
| 777 |
const s = signal?.toUpperCase()
|
| 778 |
if (s === 'BUY') return <span className="text-xs px-2 py-0.5 rounded-full bg-green-600/30 text-green-400">BUY</span>
|
|
|
|
| 780 |
if (s === 'HOLD') return <span className="text-xs px-2 py-0.5 rounded-full bg-gray-600/30 text-gray-500">HOLD</span>
|
| 781 |
return <span className="text-xs text-gray-500">{signal || '—'}</span>
|
| 782 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
huggingface-space/nextjs-app/src/app/eligible/page.tsx
CHANGED
|
@@ -2,7 +2,6 @@
|
|
| 2 |
|
| 3 |
import { useState, useEffect, useMemo, useCallback } from 'react'
|
| 4 |
import { fetchJson } from '@/lib/http'
|
| 5 |
-
import { useMarket } from '@/contexts/MarketContext'
|
| 6 |
import {
|
| 7 |
Shield,
|
| 8 |
ShieldOff,
|
|
@@ -222,10 +221,6 @@ function GaugeIndicator({ value, min, max, label, unit = '', invertColor = false
|
|
| 222 |
// ─── Main Component ────────────────────────
|
| 223 |
|
| 224 |
export default function EligiblePage() {
|
| 225 |
-
const { market } = useMarket()
|
| 226 |
-
const isUS = market === 'us'
|
| 227 |
-
const currencySymbol = isUS ? '$' : '₺'
|
| 228 |
-
const t = (tr: string, en: string) => isUS ? en : tr
|
| 229 |
const [data, setData] = useState<EligibleData | null>(null)
|
| 230 |
const [signals, setSignals] = useState<SignalsData | null>(null)
|
| 231 |
const [loading, setLoading] = useState(true)
|
|
@@ -249,7 +244,7 @@ export default function EligiblePage() {
|
|
| 249 |
setLoading(true)
|
| 250 |
setError(null)
|
| 251 |
try {
|
| 252 |
-
const res = await fetchJson<EligibleData>(
|
| 253 |
// ok: false means no scan results yet — treat as empty data, not error
|
| 254 |
setData(res)
|
| 255 |
return res
|
|
@@ -259,7 +254,7 @@ export default function EligiblePage() {
|
|
| 259 |
} finally {
|
| 260 |
setLoading(false)
|
| 261 |
}
|
| 262 |
-
}, [
|
| 263 |
|
| 264 |
const loadSignals = useCallback(async (symbols: string[]) => {
|
| 265 |
if (!symbols.length) return
|
|
@@ -267,7 +262,7 @@ export default function EligiblePage() {
|
|
| 267 |
try {
|
| 268 |
const res = await fetchJson<SignalsData>(
|
| 269 |
'/api/trading-signals',
|
| 270 |
-
{ method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ symbols
|
| 271 |
{ timeoutMs: 60000 }
|
| 272 |
)
|
| 273 |
if (res.ok) setSignals(res)
|
|
@@ -276,20 +271,20 @@ export default function EligiblePage() {
|
|
| 276 |
} finally {
|
| 277 |
setSignalsLoading(false)
|
| 278 |
}
|
| 279 |
-
}, [
|
| 280 |
|
| 281 |
-
const startScan = useCallback(async (universe: string =
|
| 282 |
setScanLoading(true)
|
| 283 |
try {
|
| 284 |
await fetchJson('/api/trading', {
|
| 285 |
method: 'POST',
|
| 286 |
headers: { 'Content-Type': 'application/json' },
|
| 287 |
-
body: JSON.stringify({ action: 'scan', reason: universe
|
| 288 |
})
|
| 289 |
// Poll for results every 15 seconds
|
| 290 |
const poll = setInterval(async () => {
|
| 291 |
try {
|
| 292 |
-
const res = await fetchJson<EligibleData>(
|
| 293 |
setData(res) // always update UI with latest progress
|
| 294 |
// Stop polling when: scan finished, or ok:true with results, or scan manually ended
|
| 295 |
const scanDone = res.scanCompleted || (!res.scanRunning && !scanLoading)
|
|
@@ -313,13 +308,11 @@ export default function EligiblePage() {
|
|
| 313 |
}, 90 * 60 * 1000)
|
| 314 |
} catch (e: unknown) {
|
| 315 |
setScanLoading(false)
|
| 316 |
-
setError(e instanceof Error ? e.message :
|
| 317 |
}
|
| 318 |
-
}, [
|
| 319 |
|
| 320 |
useEffect(() => {
|
| 321 |
-
setData(null)
|
| 322 |
-
setSignals(null)
|
| 323 |
loadEligible().then((res) => {
|
| 324 |
if (res?.eligible?.length) loadSignals(res.eligible.map((e) => e.symbol))
|
| 325 |
})
|
|
@@ -382,7 +375,7 @@ export default function EligiblePage() {
|
|
| 382 |
<div className="min-h-screen bg-gray-50 flex items-center justify-center">
|
| 383 |
<div className="flex flex-col items-center gap-3">
|
| 384 |
<RefreshCw className="w-8 h-8 text-blue-400 animate-spin" />
|
| 385 |
-
<p className="text-gray-500">
|
| 386 |
</div>
|
| 387 |
</div>
|
| 388 |
)
|
|
@@ -485,7 +478,7 @@ export default function EligiblePage() {
|
|
| 485 |
</p>
|
| 486 |
)}
|
| 487 |
<p className="text-gray-400 text-xs mt-0.5 flex items-center gap-1">
|
| 488 |
-
<Clock className="w-3 h-3" />
|
| 489 |
</p>
|
| 490 |
</div>
|
| 491 |
<div className="flex items-center gap-2">
|
|
@@ -509,33 +502,33 @@ export default function EligiblePage() {
|
|
| 509 |
{showScanMenu && !isScanRunning && (
|
| 510 |
<div className="absolute right-0 top-full mt-1 bg-white border border-gray-200 rounded-lg shadow-lg z-20 py-1 min-w-[200px]">
|
| 511 |
<button
|
| 512 |
-
onClick={() => { setShowScanMenu(false); startScan(
|
| 513 |
className="w-full px-4 py-2.5 text-left text-sm hover:bg-gray-50 flex items-center gap-2"
|
| 514 |
>
|
| 515 |
<Zap className="w-4 h-4 text-blue-500" />
|
| 516 |
<div>
|
| 517 |
-
<div className="font-medium">
|
| 518 |
-
<div className="text-xs text-gray-400">
|
| 519 |
</div>
|
| 520 |
</button>
|
| 521 |
<button
|
| 522 |
-
onClick={() => { setShowScanMenu(false); startScan(
|
| 523 |
className="w-full px-4 py-2.5 text-left text-sm hover:bg-gray-50 flex items-center gap-2"
|
| 524 |
>
|
| 525 |
<BarChart3 className="w-4 h-4 text-green-500" />
|
| 526 |
<div>
|
| 527 |
-
<div className="font-medium">
|
| 528 |
-
<div className="text-xs text-gray-400">
|
| 529 |
</div>
|
| 530 |
</button>
|
| 531 |
<button
|
| 532 |
-
onClick={() => { setShowScanMenu(false); startScan(
|
| 533 |
className="w-full px-4 py-2.5 text-left text-sm hover:bg-gray-50 flex items-center gap-2 border-t border-gray-100"
|
| 534 |
>
|
| 535 |
<Activity className="w-4 h-4 text-purple-500" />
|
| 536 |
<div>
|
| 537 |
-
<div className="font-medium">
|
| 538 |
-
<div className="text-xs text-gray-400">
|
| 539 |
</div>
|
| 540 |
</button>
|
| 541 |
</div>
|
|
@@ -657,15 +650,15 @@ export default function EligiblePage() {
|
|
| 657 |
<div className="flex items-center justify-between mb-3">
|
| 658 |
<div>
|
| 659 |
<div className="text-xs text-gray-500">Son Fiyat</div>
|
| 660 |
-
<div className="text-lg font-bold text-gray-900">{
|
| 661 |
</div>
|
| 662 |
<ScoreBar score={sig.signalScore} />
|
| 663 |
</div>
|
| 664 |
{(sig.entryPrice || sig.targetPrice || sig.stopLoss) && (
|
| 665 |
<div className="grid grid-cols-3 gap-2 mb-3">
|
| 666 |
-
<PriceBox label="Giriş" value={sig.entryPrice} color="text-blue-400"
|
| 667 |
-
<PriceBox label="Hedef" value={sig.targetPrice} color="text-green-400"
|
| 668 |
-
<PriceBox label="Stop" value={sig.stopLoss} color="text-red-400"
|
| 669 |
</div>
|
| 670 |
)}
|
| 671 |
<div className="flex items-center justify-between text-xs">
|
|
@@ -720,10 +713,10 @@ export default function EligiblePage() {
|
|
| 720 |
{sig ? <span className={`inline-flex items-center gap-1 text-xs px-2 py-0.5 rounded-full font-bold ${signalColor(sig.signal)}`}>{signalIcon(sig.signal)} {sig.signal}</span>
|
| 721 |
: signalsLoading ? <RefreshCw className="w-3 h-3 text-gray-600 animate-spin" /> : '—'}
|
| 722 |
</td>
|
| 723 |
-
<td className="px-3 py-2.5 text-right font-mono">{sig ? `${
|
| 724 |
-
<td className="px-3 py-2.5 text-right font-mono text-blue-400">{sig?.entryPrice ? `${
|
| 725 |
-
<td className="px-3 py-2.5 text-right font-mono text-green-400">{sig?.targetPrice ? `${
|
| 726 |
-
<td className="px-3 py-2.5 text-right font-mono text-red-400">{sig?.stopLoss ? `${
|
| 727 |
<td className="px-3 py-2.5 text-right">
|
| 728 |
{sig?.potentialReturn != null ? <span className={sig.potentialReturn > 0 ? 'text-green-400 font-semibold' : 'text-red-400'}>%{sig.potentialReturn.toFixed(1)}</span> : '—'}
|
| 729 |
</td>
|
|
@@ -752,7 +745,7 @@ export default function EligiblePage() {
|
|
| 752 |
)}
|
| 753 |
|
| 754 |
{/* ─── DETAIL MODAL ─── */}
|
| 755 |
-
{selectedSignal && <DetailModal stock={selectedSignal} onClose={() => setSelectedStock(null)}
|
| 756 |
|
| 757 |
{/* Excluded & Stage-1 sections */}
|
| 758 |
<div className="space-y-4 mt-6">
|
|
@@ -813,11 +806,11 @@ function SummaryCard({ icon, value, label, color }: { icon: React.ReactNode; val
|
|
| 813 |
)
|
| 814 |
}
|
| 815 |
|
| 816 |
-
function PriceBox({ label, value, color
|
| 817 |
return (
|
| 818 |
<div className="bg-gray-50 rounded p-2 text-center">
|
| 819 |
<div className="text-[10px] text-gray-500">{label}</div>
|
| 820 |
-
<div className={`text-sm font-mono font-semibold ${color}`}>{value ? `${
|
| 821 |
</div>
|
| 822 |
)
|
| 823 |
}
|
|
@@ -848,10 +841,9 @@ function CollapsibleSection({ title, icon, open, onToggle, children }: {
|
|
| 848 |
|
| 849 |
// ─── Detail Modal ──────────────────────────
|
| 850 |
|
| 851 |
-
function DetailModal({ stock, onClose
|
| 852 |
stock: { symbol: string; sharpe: number; annual_return: number; hit_rate: number; quality: string; signal: TradingSignal | null }
|
| 853 |
onClose: () => void
|
| 854 |
-
currencySymbol: string
|
| 855 |
}) {
|
| 856 |
const sig = stock.signal
|
| 857 |
|
|
@@ -898,10 +890,10 @@ function DetailModal({ stock, onClose, currencySymbol }: {
|
|
| 898 |
<div>
|
| 899 |
<h3 className="text-sm font-semibold text-gray-500 mb-3 flex items-center gap-2"><Target className="w-4 h-4" /> Trading Planı</h3>
|
| 900 |
<div className="grid grid-cols-2 md:grid-cols-4 gap-3">
|
| 901 |
-
<TradingPlanCard label="Son Fiyat" value={`${
|
| 902 |
-
<TradingPlanCard label="Giriş Fiyatı" value={sig.entryPrice ? `${
|
| 903 |
-
<TradingPlanCard label="Hedef Fiyat" value={sig.targetPrice ? `${
|
| 904 |
-
<TradingPlanCard label="Stop Loss" value={sig.stopLoss ? `${
|
| 905 |
</div>
|
| 906 |
{sig.riskReward && <div className="mt-2 text-xs text-gray-500 text-center">Risk : Ödül = 1 : {sig.riskReward.toFixed(1)}</div>}
|
| 907 |
</div>
|
|
@@ -930,18 +922,18 @@ function DetailModal({ stock, onClose, currencySymbol }: {
|
|
| 930 |
</div>
|
| 931 |
{/* Moving Averages */}
|
| 932 |
<div className="mt-3 grid grid-cols-3 md:grid-cols-5 gap-2">
|
| 933 |
-
{sig.indicators.sma20 && <MABadge label="SMA20" value={sig.indicators.sma20} lastPrice={sig.lastPrice}
|
| 934 |
-
{sig.indicators.sma50 && <MABadge label="SMA50" value={sig.indicators.sma50} lastPrice={sig.lastPrice}
|
| 935 |
-
{sig.indicators.sma200 && <MABadge label="SMA200" value={sig.indicators.sma200} lastPrice={sig.lastPrice}
|
| 936 |
-
{sig.indicators.ema12 && <MABadge label="EMA12" value={sig.indicators.ema12} lastPrice={sig.lastPrice}
|
| 937 |
-
{sig.indicators.ema26 && <MABadge label="EMA26" value={sig.indicators.ema26} lastPrice={sig.lastPrice}
|
| 938 |
</div>
|
| 939 |
{/* Bollinger */}
|
| 940 |
{sig.indicators.bollinger && (
|
| 941 |
<div className="mt-3 flex items-center gap-4 justify-center text-xs">
|
| 942 |
-
<span className="text-red-400">BB Alt: {
|
| 943 |
-
<span className="text-yellow-400">BB Orta: {
|
| 944 |
-
<span className="text-green-400">BB Üst: {
|
| 945 |
</div>
|
| 946 |
)}
|
| 947 |
</div>
|
|
@@ -992,12 +984,12 @@ function TradingPlanCard({ label, value, color, subtitle }: { label: string; val
|
|
| 992 |
)
|
| 993 |
}
|
| 994 |
|
| 995 |
-
function MABadge({ label, value, lastPrice
|
| 996 |
const above = lastPrice > value
|
| 997 |
return (
|
| 998 |
<div className={`text-center rounded px-2 py-1.5 ${above ? 'bg-green-900/30' : 'bg-red-900/30'}`}>
|
| 999 |
<div className="text-[10px] text-gray-500">{label}</div>
|
| 1000 |
-
<div className="text-xs font-mono">{
|
| 1001 |
<div className={`text-[9px] ${above ? 'text-green-500' : 'text-red-500'}`}>{above ? '▲ Üstünde' : '▼ Altında'}</div>
|
| 1002 |
</div>
|
| 1003 |
)
|
|
|
|
| 2 |
|
| 3 |
import { useState, useEffect, useMemo, useCallback } from 'react'
|
| 4 |
import { fetchJson } from '@/lib/http'
|
|
|
|
| 5 |
import {
|
| 6 |
Shield,
|
| 7 |
ShieldOff,
|
|
|
|
| 221 |
// ─── Main Component ────────────────────────
|
| 222 |
|
| 223 |
export default function EligiblePage() {
|
|
|
|
|
|
|
|
|
|
|
|
|
| 224 |
const [data, setData] = useState<EligibleData | null>(null)
|
| 225 |
const [signals, setSignals] = useState<SignalsData | null>(null)
|
| 226 |
const [loading, setLoading] = useState(true)
|
|
|
|
| 244 |
setLoading(true)
|
| 245 |
setError(null)
|
| 246 |
try {
|
| 247 |
+
const res = await fetchJson<EligibleData>('/api/eligible', undefined, { timeoutMs: 15000 })
|
| 248 |
// ok: false means no scan results yet — treat as empty data, not error
|
| 249 |
setData(res)
|
| 250 |
return res
|
|
|
|
| 254 |
} finally {
|
| 255 |
setLoading(false)
|
| 256 |
}
|
| 257 |
+
}, [])
|
| 258 |
|
| 259 |
const loadSignals = useCallback(async (symbols: string[]) => {
|
| 260 |
if (!symbols.length) return
|
|
|
|
| 262 |
try {
|
| 263 |
const res = await fetchJson<SignalsData>(
|
| 264 |
'/api/trading-signals',
|
| 265 |
+
{ method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ symbols }) },
|
| 266 |
{ timeoutMs: 60000 }
|
| 267 |
)
|
| 268 |
if (res.ok) setSignals(res)
|
|
|
|
| 271 |
} finally {
|
| 272 |
setSignalsLoading(false)
|
| 273 |
}
|
| 274 |
+
}, [])
|
| 275 |
|
| 276 |
+
const startScan = useCallback(async (universe: string = 'bist30') => {
|
| 277 |
setScanLoading(true)
|
| 278 |
try {
|
| 279 |
await fetchJson('/api/trading', {
|
| 280 |
method: 'POST',
|
| 281 |
headers: { 'Content-Type': 'application/json' },
|
| 282 |
+
body: JSON.stringify({ action: 'scan', reason: universe }),
|
| 283 |
})
|
| 284 |
// Poll for results every 15 seconds
|
| 285 |
const poll = setInterval(async () => {
|
| 286 |
try {
|
| 287 |
+
const res = await fetchJson<EligibleData>('/api/eligible', undefined, { timeoutMs: 10000 })
|
| 288 |
setData(res) // always update UI with latest progress
|
| 289 |
// Stop polling when: scan finished, or ok:true with results, or scan manually ended
|
| 290 |
const scanDone = res.scanCompleted || (!res.scanRunning && !scanLoading)
|
|
|
|
| 308 |
}, 90 * 60 * 1000)
|
| 309 |
} catch (e: unknown) {
|
| 310 |
setScanLoading(false)
|
| 311 |
+
setError(e instanceof Error ? e.message : 'Tarama başlatılamadı')
|
| 312 |
}
|
| 313 |
+
}, [loadEligible, loadSignals])
|
| 314 |
|
| 315 |
useEffect(() => {
|
|
|
|
|
|
|
| 316 |
loadEligible().then((res) => {
|
| 317 |
if (res?.eligible?.length) loadSignals(res.eligible.map((e) => e.symbol))
|
| 318 |
})
|
|
|
|
| 375 |
<div className="min-h-screen bg-gray-50 flex items-center justify-center">
|
| 376 |
<div className="flex flex-col items-center gap-3">
|
| 377 |
<RefreshCw className="w-8 h-8 text-blue-400 animate-spin" />
|
| 378 |
+
<p className="text-gray-500">Eligible hisseler yükleniyor...</p>
|
| 379 |
</div>
|
| 380 |
</div>
|
| 381 |
)
|
|
|
|
| 478 |
</p>
|
| 479 |
)}
|
| 480 |
<p className="text-gray-400 text-xs mt-0.5 flex items-center gap-1">
|
| 481 |
+
<Clock className="w-3 h-3" /> Otomatik tarama: Her gün 19:00 (TR) — Tüm BIST
|
| 482 |
</p>
|
| 483 |
</div>
|
| 484 |
<div className="flex items-center gap-2">
|
|
|
|
| 502 |
{showScanMenu && !isScanRunning && (
|
| 503 |
<div className="absolute right-0 top-full mt-1 bg-white border border-gray-200 rounded-lg shadow-lg z-20 py-1 min-w-[200px]">
|
| 504 |
<button
|
| 505 |
+
onClick={() => { setShowScanMenu(false); startScan('bist30') }}
|
| 506 |
className="w-full px-4 py-2.5 text-left text-sm hover:bg-gray-50 flex items-center gap-2"
|
| 507 |
>
|
| 508 |
<Zap className="w-4 h-4 text-blue-500" />
|
| 509 |
<div>
|
| 510 |
+
<div className="font-medium">BIST30 Hızlı Tara</div>
|
| 511 |
+
<div className="text-xs text-gray-400">~15-30 dakika</div>
|
| 512 |
</div>
|
| 513 |
</button>
|
| 514 |
<button
|
| 515 |
+
onClick={() => { setShowScanMenu(false); startScan('bist100') }}
|
| 516 |
className="w-full px-4 py-2.5 text-left text-sm hover:bg-gray-50 flex items-center gap-2"
|
| 517 |
>
|
| 518 |
<BarChart3 className="w-4 h-4 text-green-500" />
|
| 519 |
<div>
|
| 520 |
+
<div className="font-medium">BIST100 Tam Tara</div>
|
| 521 |
+
<div className="text-xs text-gray-400">~60-90 dakika</div>
|
| 522 |
</div>
|
| 523 |
</button>
|
| 524 |
<button
|
| 525 |
+
onClick={() => { setShowScanMenu(false); startScan('all') }}
|
| 526 |
className="w-full px-4 py-2.5 text-left text-sm hover:bg-gray-50 flex items-center gap-2 border-t border-gray-100"
|
| 527 |
>
|
| 528 |
<Activity className="w-4 h-4 text-purple-500" />
|
| 529 |
<div>
|
| 530 |
+
<div className="font-medium">Tüm BIST Tara</div>
|
| 531 |
+
<div className="text-xs text-gray-400">~2-4 saat</div>
|
| 532 |
</div>
|
| 533 |
</button>
|
| 534 |
</div>
|
|
|
|
| 650 |
<div className="flex items-center justify-between mb-3">
|
| 651 |
<div>
|
| 652 |
<div className="text-xs text-gray-500">Son Fiyat</div>
|
| 653 |
+
<div className="text-lg font-bold text-gray-900">₺{sig.lastPrice.toFixed(2)}</div>
|
| 654 |
</div>
|
| 655 |
<ScoreBar score={sig.signalScore} />
|
| 656 |
</div>
|
| 657 |
{(sig.entryPrice || sig.targetPrice || sig.stopLoss) && (
|
| 658 |
<div className="grid grid-cols-3 gap-2 mb-3">
|
| 659 |
+
<PriceBox label="Giriş" value={sig.entryPrice} color="text-blue-400" />
|
| 660 |
+
<PriceBox label="Hedef" value={sig.targetPrice} color="text-green-400" />
|
| 661 |
+
<PriceBox label="Stop" value={sig.stopLoss} color="text-red-400" />
|
| 662 |
</div>
|
| 663 |
)}
|
| 664 |
<div className="flex items-center justify-between text-xs">
|
|
|
|
| 713 |
{sig ? <span className={`inline-flex items-center gap-1 text-xs px-2 py-0.5 rounded-full font-bold ${signalColor(sig.signal)}`}>{signalIcon(sig.signal)} {sig.signal}</span>
|
| 714 |
: signalsLoading ? <RefreshCw className="w-3 h-3 text-gray-600 animate-spin" /> : '—'}
|
| 715 |
</td>
|
| 716 |
+
<td className="px-3 py-2.5 text-right font-mono">{sig ? `₺${sig.lastPrice.toFixed(2)}` : '—'}</td>
|
| 717 |
+
<td className="px-3 py-2.5 text-right font-mono text-blue-400">{sig?.entryPrice ? `₺${sig.entryPrice.toFixed(2)}` : '—'}</td>
|
| 718 |
+
<td className="px-3 py-2.5 text-right font-mono text-green-400">{sig?.targetPrice ? `₺${sig.targetPrice.toFixed(2)}` : '—'}</td>
|
| 719 |
+
<td className="px-3 py-2.5 text-right font-mono text-red-400">{sig?.stopLoss ? `₺${sig.stopLoss.toFixed(2)}` : '—'}</td>
|
| 720 |
<td className="px-3 py-2.5 text-right">
|
| 721 |
{sig?.potentialReturn != null ? <span className={sig.potentialReturn > 0 ? 'text-green-400 font-semibold' : 'text-red-400'}>%{sig.potentialReturn.toFixed(1)}</span> : '—'}
|
| 722 |
</td>
|
|
|
|
| 745 |
)}
|
| 746 |
|
| 747 |
{/* ─── DETAIL MODAL ─── */}
|
| 748 |
+
{selectedSignal && <DetailModal stock={selectedSignal} onClose={() => setSelectedStock(null)} />}
|
| 749 |
|
| 750 |
{/* Excluded & Stage-1 sections */}
|
| 751 |
<div className="space-y-4 mt-6">
|
|
|
|
| 806 |
)
|
| 807 |
}
|
| 808 |
|
| 809 |
+
function PriceBox({ label, value, color }: { label: string; value: number | null; color: string }) {
|
| 810 |
return (
|
| 811 |
<div className="bg-gray-50 rounded p-2 text-center">
|
| 812 |
<div className="text-[10px] text-gray-500">{label}</div>
|
| 813 |
+
<div className={`text-sm font-mono font-semibold ${color}`}>{value ? `₺${value.toFixed(2)}` : '—'}</div>
|
| 814 |
</div>
|
| 815 |
)
|
| 816 |
}
|
|
|
|
| 841 |
|
| 842 |
// ─── Detail Modal ──────────────────────────
|
| 843 |
|
| 844 |
+
function DetailModal({ stock, onClose }: {
|
| 845 |
stock: { symbol: string; sharpe: number; annual_return: number; hit_rate: number; quality: string; signal: TradingSignal | null }
|
| 846 |
onClose: () => void
|
|
|
|
| 847 |
}) {
|
| 848 |
const sig = stock.signal
|
| 849 |
|
|
|
|
| 890 |
<div>
|
| 891 |
<h3 className="text-sm font-semibold text-gray-500 mb-3 flex items-center gap-2"><Target className="w-4 h-4" /> Trading Planı</h3>
|
| 892 |
<div className="grid grid-cols-2 md:grid-cols-4 gap-3">
|
| 893 |
+
<TradingPlanCard label="Son Fiyat" value={`₺${sig.lastPrice.toFixed(2)}`} color="text-white" />
|
| 894 |
+
<TradingPlanCard label="Giriş Fiyatı" value={sig.entryPrice ? `₺${sig.entryPrice.toFixed(2)}` : '—'} color="text-blue-400" subtitle={sig.entryPrice ? `Güncel fiyattan %${(((sig.entryPrice - sig.lastPrice) / sig.lastPrice) * 100).toFixed(1)}` : undefined} />
|
| 895 |
+
<TradingPlanCard label="Hedef Fiyat" value={sig.targetPrice ? `₺${sig.targetPrice.toFixed(2)}` : '—'} color="text-green-400" subtitle={sig.potentialReturn ? `Potansiyel: %${sig.potentialReturn.toFixed(1)}` : undefined} />
|
| 896 |
+
<TradingPlanCard label="Stop Loss" value={sig.stopLoss ? `₺${sig.stopLoss.toFixed(2)}` : '—'} color="text-red-400" subtitle={sig.stopLoss && sig.entryPrice ? `Risk: %${(((sig.entryPrice - sig.stopLoss) / sig.entryPrice) * 100).toFixed(1)}` : undefined} />
|
| 897 |
</div>
|
| 898 |
{sig.riskReward && <div className="mt-2 text-xs text-gray-500 text-center">Risk : Ödül = 1 : {sig.riskReward.toFixed(1)}</div>}
|
| 899 |
</div>
|
|
|
|
| 922 |
</div>
|
| 923 |
{/* Moving Averages */}
|
| 924 |
<div className="mt-3 grid grid-cols-3 md:grid-cols-5 gap-2">
|
| 925 |
+
{sig.indicators.sma20 && <MABadge label="SMA20" value={sig.indicators.sma20} lastPrice={sig.lastPrice} />}
|
| 926 |
+
{sig.indicators.sma50 && <MABadge label="SMA50" value={sig.indicators.sma50} lastPrice={sig.lastPrice} />}
|
| 927 |
+
{sig.indicators.sma200 && <MABadge label="SMA200" value={sig.indicators.sma200} lastPrice={sig.lastPrice} />}
|
| 928 |
+
{sig.indicators.ema12 && <MABadge label="EMA12" value={sig.indicators.ema12} lastPrice={sig.lastPrice} />}
|
| 929 |
+
{sig.indicators.ema26 && <MABadge label="EMA26" value={sig.indicators.ema26} lastPrice={sig.lastPrice} />}
|
| 930 |
</div>
|
| 931 |
{/* Bollinger */}
|
| 932 |
{sig.indicators.bollinger && (
|
| 933 |
<div className="mt-3 flex items-center gap-4 justify-center text-xs">
|
| 934 |
+
<span className="text-red-400">BB Alt: ₺{sig.indicators.bollinger.lower.toFixed(2)}</span>
|
| 935 |
+
<span className="text-yellow-400">BB Orta: ₺{sig.indicators.bollinger.middle.toFixed(2)}</span>
|
| 936 |
+
<span className="text-green-400">BB Üst: ₺{sig.indicators.bollinger.upper.toFixed(2)}</span>
|
| 937 |
</div>
|
| 938 |
)}
|
| 939 |
</div>
|
|
|
|
| 984 |
)
|
| 985 |
}
|
| 986 |
|
| 987 |
+
function MABadge({ label, value, lastPrice }: { label: string; value: number; lastPrice: number }) {
|
| 988 |
const above = lastPrice > value
|
| 989 |
return (
|
| 990 |
<div className={`text-center rounded px-2 py-1.5 ${above ? 'bg-green-900/30' : 'bg-red-900/30'}`}>
|
| 991 |
<div className="text-[10px] text-gray-500">{label}</div>
|
| 992 |
+
<div className="text-xs font-mono">₺{value.toFixed(2)}</div>
|
| 993 |
<div className={`text-[9px] ${above ? 'text-green-500' : 'text-red-500'}`}>{above ? '▲ Üstünde' : '▼ Altında'}</div>
|
| 994 |
</div>
|
| 995 |
)
|
huggingface-space/nextjs-app/src/app/ml-scan/page.tsx
CHANGED
|
@@ -6,7 +6,6 @@ import Link from 'next/link';
|
|
| 6 |
|
| 7 |
import { fetchJson } from '@/lib/http';
|
| 8 |
import { logger } from '@/lib/logger';
|
| 9 |
-
import { useMarket } from '@/contexts/MarketContext';
|
| 10 |
|
| 11 |
// Force dynamic rendering
|
| 12 |
export const dynamic = 'force-dynamic';
|
|
@@ -74,19 +73,13 @@ interface MLScanResult {
|
|
| 74 |
models_used?: number;
|
| 75 |
}
|
| 76 |
|
| 77 |
-
const
|
| 78 |
'BIST 30': { name: 'bist30', expected: 30 },
|
| 79 |
'BIST 50': { name: 'bist50', expected: 50 },
|
| 80 |
'BIST 100': { name: 'bist100', expected: 100 },
|
| 81 |
'Tüm BIST': { name: 'all', expected: 500 },
|
| 82 |
};
|
| 83 |
|
| 84 |
-
const US_UNIVERSE: Record<string, { name: string; expected: number }> = {
|
| 85 |
-
'Popular 30': { name: 'us_popular', expected: 30 },
|
| 86 |
-
'S&P 500 Top 50': { name: 'us_sp500', expected: 50 },
|
| 87 |
-
'Tech 30': { name: 'us_tech', expected: 30 },
|
| 88 |
-
};
|
| 89 |
-
|
| 90 |
function chunkArray<T>(arr: T[], size: number): T[][] {
|
| 91 |
const out: T[][] = [];
|
| 92 |
const n = Math.max(1, Math.floor(size));
|
|
@@ -114,15 +107,10 @@ export default function MLScanPage() {
|
|
| 114 |
const [loading, setLoading] = useState(false);
|
| 115 |
const [filter, setFilter] = useState<'all' | 'BUY' | 'SELL' | 'HOLD'>('all');
|
| 116 |
const [minConfidence, setMinConfidence] = useState(10);
|
| 117 |
-
const { market } = useMarket();
|
| 118 |
-
const isUS = market === 'us';
|
| 119 |
-
const currencySymbol = isUS ? '$' : '₺';
|
| 120 |
-
const MARKET_UNIVERSE = isUS ? US_UNIVERSE : BIST_UNIVERSE;
|
| 121 |
-
const defaultSelection = isUS ? 'Popular 30' : 'Popüler';
|
| 122 |
|
| 123 |
// Gelişmiş Ayarlar
|
| 124 |
const [showAdvanced, setShowAdvanced] = useState(false);
|
| 125 |
-
const [marketSelection, setMarketSelection] = useState<
|
| 126 |
const [modelType, setModelType] = useState<'ensemble' | 'xgboost' | 'lightgbm' | 'rf'>('ensemble');
|
| 127 |
const [predictionDays, setPredictionDays] = useState(7);
|
| 128 |
const [scanStatus, setScanStatus] = useState<string>('');
|
|
@@ -158,24 +146,18 @@ export default function MLScanPage() {
|
|
| 158 |
|
| 159 |
const handleScan = async () => {
|
| 160 |
setLoading(true);
|
| 161 |
-
setScanStatus(
|
| 162 |
|
| 163 |
try {
|
| 164 |
// Piyasa seçimine göre hisse listesi al
|
| 165 |
let symbols: string[] = [];
|
| 166 |
|
| 167 |
-
if (marketSelection === 'Popüler'
|
| 168 |
-
|
| 169 |
-
|
| 170 |
-
symbols = Array.isArray(uniData?.symbols) ? uniData.symbols : [];
|
| 171 |
-
} else {
|
| 172 |
-
const stocksData = await fetchJson<Record<string, unknown>>(`/api/popular-stocks`, { method: 'GET' }, { timeoutMs: 12000, retries: 1 });
|
| 173 |
-
symbols = Array.isArray(stocksData.stocks) ? stocksData.stocks as string[] : [];
|
| 174 |
-
}
|
| 175 |
} else {
|
| 176 |
const uni = MARKET_UNIVERSE[marketSelection];
|
| 177 |
-
|
| 178 |
-
setScanStatus(isUS ? `Fetching ${marketSelection} list...` : `${marketSelection} listesi alınıyor (resmi kaynak)...`);
|
| 179 |
const universeData = await fetchJson<Record<string, unknown>>(
|
| 180 |
`/api/universe?name=${encodeURIComponent(uni.name)}`,
|
| 181 |
{ method: 'GET' },
|
|
@@ -183,143 +165,17 @@ export default function MLScanPage() {
|
|
| 183 |
);
|
| 184 |
symbols = Array.isArray(universeData?.symbols) ? universeData.symbols : [];
|
| 185 |
|
|
|
|
| 186 |
if (symbols.length < Math.floor(uni.expected * 0.9)) {
|
| 187 |
-
throw new Error(
|
| 188 |
-
? `${marketSelection} list unavailable (expected ~${uni.expected}, got ${symbols.length})`
|
| 189 |
-
: `${marketSelection} listesi alınamadı (beklenen ~${uni.expected}, gelen ${symbols.length}). Backend /api/universe çalışmıyor olabilir.`);
|
| 190 |
}
|
| 191 |
}
|
| 192 |
|
| 193 |
if (symbols.length === 0) {
|
| 194 |
-
throw new Error(
|
| 195 |
}
|
| 196 |
|
| 197 |
-
|
| 198 |
-
const batches = chunkArray(symbols, batchSize);
|
| 199 |
-
|
| 200 |
-
if (isUS) {
|
| 201 |
-
setScanStatus(`${symbols.length} stocks — ML projection + technical scan starting...`);
|
| 202 |
-
const predictions: Record<string, unknown>[] = [];
|
| 203 |
-
const allSignals: Record<string, unknown>[] = [];
|
| 204 |
-
|
| 205 |
-
for (let i = 0; i < batches.length; i++) {
|
| 206 |
-
const batch = batches[i];
|
| 207 |
-
const done = Math.min((i + 1) * batchSize, symbols.length);
|
| 208 |
-
setScanStatus(`ML predictions: ${done}/${symbols.length}...`);
|
| 209 |
-
|
| 210 |
-
try {
|
| 211 |
-
const predictionsData = await fetchJson<Record<string, unknown>>(
|
| 212 |
-
`/api/ml-predictions`,
|
| 213 |
-
{ method: 'POST' },
|
| 214 |
-
{
|
| 215 |
-
timeoutMs: 45000,
|
| 216 |
-
retries: 0,
|
| 217 |
-
jsonBody: {
|
| 218 |
-
symbols: batch,
|
| 219 |
-
days_ahead: predictionDays,
|
| 220 |
-
model: modelType,
|
| 221 |
-
market: 'us',
|
| 222 |
-
},
|
| 223 |
-
}
|
| 224 |
-
);
|
| 225 |
-
const batchPreds = Array.isArray(predictionsData?.predictions) ? predictionsData.predictions : [];
|
| 226 |
-
predictions.push(...batchPreds);
|
| 227 |
-
} catch (e) {
|
| 228 |
-
logger.warn('US ML prediction batch failed:', e);
|
| 229 |
-
}
|
| 230 |
-
|
| 231 |
-
setScanStatus(`Technical signals: ${done}/${symbols.length}...`);
|
| 232 |
-
|
| 233 |
-
try {
|
| 234 |
-
const resp = await fetchJson<Record<string, unknown>>(
|
| 235 |
-
`/api/trading-signals`,
|
| 236 |
-
{ method: 'POST' },
|
| 237 |
-
{
|
| 238 |
-
timeoutMs: 45000,
|
| 239 |
-
retries: 0,
|
| 240 |
-
jsonBody: { symbols: batch, market: 'us' },
|
| 241 |
-
}
|
| 242 |
-
);
|
| 243 |
-
const items = Array.isArray(resp?.signals) ? resp.signals as Record<string, unknown>[] : [];
|
| 244 |
-
allSignals.push(...items);
|
| 245 |
-
} catch (e) {
|
| 246 |
-
logger.warn('US trading-signals batch failed:', e);
|
| 247 |
-
}
|
| 248 |
-
}
|
| 249 |
-
|
| 250 |
-
if (predictions.length === 0) {
|
| 251 |
-
throw new Error('US ML scan returned 0 predictions. The local ML pipeline may be unavailable.');
|
| 252 |
-
}
|
| 253 |
-
|
| 254 |
-
if (allSignals.length === 0) {
|
| 255 |
-
throw new Error('Technical scan returned 0 results. Yahoo Finance may be rate-limiting.');
|
| 256 |
-
}
|
| 257 |
-
|
| 258 |
-
const predictionsBySymbol = new Map<string, Record<string, unknown>>();
|
| 259 |
-
predictions.forEach((item) => {
|
| 260 |
-
if (item?.symbol) predictionsBySymbol.set(String(item.symbol).toUpperCase(), item);
|
| 261 |
-
});
|
| 262 |
-
|
| 263 |
-
const mappedResults: MLScanResult[] = allSignals.map((item) => {
|
| 264 |
-
const symbol = String(item?.symbol || '').toUpperCase();
|
| 265 |
-
const pred = predictionsBySymbol.get(symbol) || null;
|
| 266 |
-
const techSignal = String(item?.signal || 'HOLD').toUpperCase() as 'BUY' | 'SELL' | 'HOLD';
|
| 267 |
-
const mlSignal = String(pred?.signal || 'HOLD').toUpperCase() as 'BUY' | 'SELL' | 'HOLD';
|
| 268 |
-
const indicators = (item?.indicators || {}) as Record<string, unknown>;
|
| 269 |
-
const mlConfidenceRaw = Number(pred?.confidence) || 0;
|
| 270 |
-
const mlConfidence = mlConfidenceRaw > 0 && mlConfidenceRaw <= 1 ? mlConfidenceRaw * 100 : mlConfidenceRaw;
|
| 271 |
-
const techScore = Math.max(0, Math.min(100, ((Number(item?.signalScore) || 0) + 100) / 2));
|
| 272 |
-
const combinedScore = Math.round((mlConfidence + techScore) / 2);
|
| 273 |
-
|
| 274 |
-
let finalRecommendation: 'BUY' | 'SELL' | 'HOLD' = 'HOLD';
|
| 275 |
-
if (mlSignal === 'SELL' || techSignal === 'SELL') {
|
| 276 |
-
finalRecommendation = mlSignal === 'BUY' && techSignal === 'SELL'
|
| 277 |
-
? 'HOLD'
|
| 278 |
-
: mlSignal === 'SELL' && techSignal === 'BUY'
|
| 279 |
-
? 'HOLD'
|
| 280 |
-
: 'SELL';
|
| 281 |
-
} else if (mlSignal === 'BUY') {
|
| 282 |
-
finalRecommendation = 'BUY';
|
| 283 |
-
} else if (techSignal === 'BUY' && mlSignal === 'HOLD') {
|
| 284 |
-
finalRecommendation = 'BUY';
|
| 285 |
-
}
|
| 286 |
-
|
| 287 |
-
return {
|
| 288 |
-
symbol,
|
| 289 |
-
current_price: Number(pred?.current_price ?? item?.lastPrice) || 0,
|
| 290 |
-
change_percent: Number(pred?.predicted_change_pct ?? pred?.prediction_change ?? 0),
|
| 291 |
-
recommendation: finalRecommendation,
|
| 292 |
-
ml_recommendation: mlSignal,
|
| 293 |
-
confidence: mlConfidence,
|
| 294 |
-
total_score: combinedScore,
|
| 295 |
-
technical_signal: techSignal,
|
| 296 |
-
indicators: {
|
| 297 |
-
rsi: indicators?.rsi as number | null ?? null,
|
| 298 |
-
macd_trend: typeof indicators?.macd === 'object' && indicators.macd
|
| 299 |
-
? ((indicators.macd as Record<string, unknown>).histogram as number > 0 ? 'bullish' : 'bearish')
|
| 300 |
-
: null,
|
| 301 |
-
bollinger_position: null,
|
| 302 |
-
volume_ratio_20d: indicators?.volume_ratio as number | null ?? null,
|
| 303 |
-
},
|
| 304 |
-
prediction_7d: Number(pred?.predicted_price) || undefined,
|
| 305 |
-
r2_score: pred?.r2_score as number | undefined,
|
| 306 |
-
mae: pred?.mae as number | undefined,
|
| 307 |
-
direction_accuracy: pred?.direction_accuracy as number | undefined,
|
| 308 |
-
models_used: pred?.models_used as number | undefined,
|
| 309 |
-
model_source: String(pred?.model || modelType || '').toUpperCase(),
|
| 310 |
-
};
|
| 311 |
-
});
|
| 312 |
-
|
| 313 |
-
setResults(mappedResults);
|
| 314 |
-
const listedNow = mappedResults.filter((r) => {
|
| 315 |
-
const matchesFilter = filter === 'all' || r.recommendation === filter;
|
| 316 |
-
const effectiveScore = Math.max(r.confidence, r.total_score);
|
| 317 |
-
return matchesFilter && effectiveScore >= minConfidence;
|
| 318 |
-
}).length;
|
| 319 |
-
setScanStatus(`✅ Scan complete! ${mappedResults.length} stocks analyzed, ${listedNow} listed after filter.`);
|
| 320 |
-
} else {
|
| 321 |
-
// BIST mode: original ML + scan-signals flow
|
| 322 |
-
setScanStatus(`${symbols.length} hisse için ML projeksiyonu + teknik tarama başlıyor (${modelType.toUpperCase()})...`);
|
| 323 |
|
| 324 |
const batchSize = symbols.length >= 80 ? 10 : symbols.length >= 40 ? 15 : symbols.length;
|
| 325 |
const batches = chunkArray(symbols, batchSize);
|
|
@@ -341,7 +197,6 @@ export default function MLScanPage() {
|
|
| 341 |
symbols: batch,
|
| 342 |
days_ahead: predictionDays,
|
| 343 |
model: modelType,
|
| 344 |
-
market: 'bist',
|
| 345 |
},
|
| 346 |
}
|
| 347 |
);
|
|
@@ -505,11 +360,10 @@ export default function MLScanPage() {
|
|
| 505 |
setScanStatus(
|
| 506 |
`✅ Tarama tamamlandı! ${mappedResults.length} hisse analiz edildi, filtre sonrası ${listedNow} hisse listeleniyor.`
|
| 507 |
);
|
| 508 |
-
} // end BIST else block
|
| 509 |
} catch (error) {
|
| 510 |
console.error('ML scan failed:', error);
|
| 511 |
-
const errorMessage = error instanceof Error ? error.message :
|
| 512 |
-
setScanStatus(`❌
|
| 513 |
} finally {
|
| 514 |
setLoading(false);
|
| 515 |
}
|
|
@@ -534,7 +388,7 @@ export default function MLScanPage() {
|
|
| 534 |
<div className="mb-6">
|
| 535 |
<div className="flex items-center gap-3 mb-2">
|
| 536 |
<Zap className="w-8 h-8 text-purple-600" />
|
| 537 |
-
<h1 className="text-3xl font-bold text-gray-900">
|
| 538 |
{apiHealth === 'healthy' && (
|
| 539 |
<span className="flex items-center gap-1 text-xs text-green-600 bg-green-50 px-2 py-1 rounded-full">
|
| 540 |
<CheckCircle className="w-3 h-3" /> API Aktif
|
|
@@ -547,17 +401,13 @@ export default function MLScanPage() {
|
|
| 547 |
)}
|
| 548 |
</div>
|
| 549 |
<p className="text-gray-600">
|
| 550 |
-
|
| 551 |
-
? 'Scans US stocks with ML projections and technical indicators together. BUY appears when ML direction and technical structure align; conflicts are downgraded to HOLD.'
|
| 552 |
-
: <>ML projeksiyonu (deneysel) ile teknik taramayı birleştirir. <strong>AL</strong> sinyali:
|
| 553 |
ML yönü BUY ise teknik kapılar geçmese bile (HOLD dahil) görünebilir.
|
| 554 |
-
Çakışma (ML BUY ↔ Teknik SELL) durumunda HOLD verilir.
|
| 555 |
-
}
|
| 556 |
</p>
|
| 557 |
</div>
|
| 558 |
|
| 559 |
{/* Model Bilgi Kartı */}
|
| 560 |
-
{!isUS && (
|
| 561 |
<div className="bg-gradient-to-r from-purple-50 to-blue-50 border border-purple-200 rounded-lg p-4 mb-6">
|
| 562 |
<h3 className="font-semibold text-purple-800 mb-2">🆕 Gelişmiş ML Modelleri</h3>
|
| 563 |
<div className="grid grid-cols-2 md:grid-cols-4 gap-4 text-sm">
|
|
@@ -579,7 +429,6 @@ export default function MLScanPage() {
|
|
| 579 |
</div>
|
| 580 |
</div>
|
| 581 |
</div>
|
| 582 |
-
)}
|
| 583 |
|
| 584 |
{/* Control Panel */}
|
| 585 |
<div className="bg-white p-6 rounded-lg shadow-sm mb-6">
|
|
@@ -593,7 +442,7 @@ export default function MLScanPage() {
|
|
| 593 |
className="bg-purple-600 text-white px-6 py-3 rounded-md hover:bg-purple-700 disabled:opacity-50 font-semibold flex items-center gap-2"
|
| 594 |
>
|
| 595 |
{loading ? <RefreshCw className="w-5 h-5 animate-spin" /> : <Zap className="w-5 h-5" />}
|
| 596 |
-
{loading ?
|
| 597 |
</button>
|
| 598 |
|
| 599 |
<button
|
|
@@ -601,12 +450,12 @@ export default function MLScanPage() {
|
|
| 601 |
className="flex items-center gap-2 px-4 py-2 border border-gray-300 rounded-md hover:bg-gray-50 text-sm"
|
| 602 |
>
|
| 603 |
<Settings className="w-4 h-4" />
|
| 604 |
-
|
| 605 |
</button>
|
| 606 |
|
| 607 |
{results.length > 0 && (
|
| 608 |
<p className="text-gray-600 text-sm">
|
| 609 |
-
{filteredResults.length} / {results.length}
|
| 610 |
</p>
|
| 611 |
)}
|
| 612 |
</div>
|
|
@@ -619,15 +468,15 @@ export default function MLScanPage() {
|
|
| 619 |
onChange={(e) => setFilter(e.target.value as 'all' | 'BUY' | 'SELL' | 'HOLD')}
|
| 620 |
className="px-3 py-2 border border-gray-300 rounded-md text-sm"
|
| 621 |
>
|
| 622 |
-
<option value="all">
|
| 623 |
-
<option value="BUY">
|
| 624 |
-
<option value="SELL">
|
| 625 |
-
<option value="HOLD">
|
| 626 |
</select>
|
| 627 |
</div>
|
| 628 |
|
| 629 |
<div className="flex items-center gap-2">
|
| 630 |
-
<span className="text-sm text-gray-600">
|
| 631 |
<input
|
| 632 |
type="number"
|
| 633 |
min="0"
|
|
@@ -644,36 +493,24 @@ export default function MLScanPage() {
|
|
| 644 |
{/* Gelişmiş Ayarlar Paneli */}
|
| 645 |
{showAdvanced && (
|
| 646 |
<div className="border-t pt-4 mt-2">
|
| 647 |
-
<h4 className="font-medium text-gray-700 mb-3">⚙️
|
| 648 |
<div className="grid grid-cols-1 md:grid-cols-4 gap-4">
|
| 649 |
<div>
|
| 650 |
-
<label htmlFor="ml-market" className="block text-sm font-medium text-gray-600 mb-1">
|
| 651 |
<select
|
| 652 |
id="ml-market"
|
| 653 |
value={marketSelection}
|
| 654 |
-
onChange={(e) => setMarketSelection(e.target.value)}
|
| 655 |
className="w-full px-3 py-2 border border-gray-300 rounded-md text-sm"
|
| 656 |
>
|
| 657 |
-
|
| 658 |
-
|
| 659 |
-
|
| 660 |
-
|
| 661 |
-
|
| 662 |
-
))}
|
| 663 |
-
</>
|
| 664 |
-
) : (
|
| 665 |
-
<>
|
| 666 |
-
<option value="Popüler">Popüler Hisseler</option>
|
| 667 |
-
<option value="BIST 30">BIST 30</option>
|
| 668 |
-
<option value="BIST 50">BIST 50</option>
|
| 669 |
-
<option value="BIST 100">BIST 100</option>
|
| 670 |
-
<option value="Tüm BIST">Tüm BIST Hisseleri</option>
|
| 671 |
-
</>
|
| 672 |
-
)}
|
| 673 |
</select>
|
| 674 |
</div>
|
| 675 |
|
| 676 |
-
{!isUS && (
|
| 677 |
<div>
|
| 678 |
<label htmlFor="ml-model" className="block text-sm font-medium text-gray-600 mb-1">ML Model</label>
|
| 679 |
<select
|
|
@@ -688,9 +525,7 @@ export default function MLScanPage() {
|
|
| 688 |
<option value="rf">RandomForest</option>
|
| 689 |
</select>
|
| 690 |
</div>
|
| 691 |
-
)}
|
| 692 |
|
| 693 |
-
{!isUS && (
|
| 694 |
<div>
|
| 695 |
<label htmlFor="ml-days" className="block text-sm font-medium text-gray-600 mb-1">Tahmin Süresi</label>
|
| 696 |
<select
|
|
@@ -707,15 +542,12 @@ export default function MLScanPage() {
|
|
| 707 |
<option value={30}>30 Gün (1 Ay)</option>
|
| 708 |
</select>
|
| 709 |
</div>
|
| 710 |
-
)}
|
| 711 |
|
| 712 |
-
{!isUS && (
|
| 713 |
<div className="flex items-end">
|
| 714 |
<div className="text-xs text-gray-500 bg-gray-50 p-2 rounded">
|
| 715 |
<strong>Ensemble:</strong> XGBoost + LightGBM + RF modellerinin ortalaması
|
| 716 |
</div>
|
| 717 |
</div>
|
| 718 |
-
)}
|
| 719 |
</div>
|
| 720 |
</div>
|
| 721 |
)}
|
|
@@ -737,19 +569,19 @@ export default function MLScanPage() {
|
|
| 737 |
{results.length > 0 && (
|
| 738 |
<div className="grid grid-cols-1 md:grid-cols-4 gap-4 mb-6">
|
| 739 |
<div className="bg-white p-4 rounded-lg shadow-sm">
|
| 740 |
-
<p className="text-sm text-gray-600">
|
| 741 |
<p className="text-2xl font-bold text-gray-900">{filteredResults.length}</p>
|
| 742 |
</div>
|
| 743 |
<div className="bg-green-50 p-4 rounded-lg shadow-sm">
|
| 744 |
-
<p className="text-sm text-green-700">
|
| 745 |
<p className="text-2xl font-bold text-green-800">{buyCount}</p>
|
| 746 |
</div>
|
| 747 |
<div className="bg-yellow-50 p-4 rounded-lg shadow-sm">
|
| 748 |
-
<p className="text-sm text-yellow-700">
|
| 749 |
<p className="text-2xl font-bold text-yellow-800">{holdCount}</p>
|
| 750 |
</div>
|
| 751 |
<div className="bg-red-50 p-4 rounded-lg shadow-sm">
|
| 752 |
-
<p className="text-sm text-red-700">
|
| 753 |
<p className="text-2xl font-bold text-red-800">{sellCount}</p>
|
| 754 |
</div>
|
| 755 |
</div>
|
|
@@ -759,20 +591,17 @@ export default function MLScanPage() {
|
|
| 759 |
{loading && (
|
| 760 |
<div className="bg-white rounded-lg shadow-sm p-12 text-center">
|
| 761 |
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-purple-600 mx-auto mb-4"></div>
|
| 762 |
-
<p className="text-gray-600">
|
| 763 |
-
<p className="text-sm text-gray-500 mt-2">
|
| 764 |
</div>
|
| 765 |
)}
|
| 766 |
|
| 767 |
{!loading && results.length === 0 && (
|
| 768 |
<div className="bg-white rounded-lg shadow-sm p-12 text-center">
|
| 769 |
<Zap className="w-16 h-16 text-gray-400 mx-auto mb-4" />
|
| 770 |
-
<h3 className="text-lg font-semibold text-gray-900 mb-2">
|
| 771 |
<p className="text-gray-600">
|
| 772 |
-
|
| 773 |
-
? 'Click the button to scan US stocks with technical indicators'
|
| 774 |
-
: 'BIST hisselerini ML projeksiyonu + teknik kapılar ile taramak için butona tıklayın'
|
| 775 |
-
}
|
| 776 |
</p>
|
| 777 |
</div>
|
| 778 |
)}
|
|
@@ -815,11 +644,11 @@ export default function MLScanPage() {
|
|
| 815 |
|
| 816 |
<div className="flex items-center justify-between mb-3">
|
| 817 |
<div>
|
| 818 |
-
<p className="text-sm text-gray-600">
|
| 819 |
-
<p className="font-semibold">{
|
| 820 |
</div>
|
| 821 |
<div className="text-right">
|
| 822 |
-
<p className="text-sm text-gray-600">
|
| 823 |
<p
|
| 824 |
className={`font-semibold ${
|
| 825 |
(stock.change_percent ?? 0) >= 0 ? 'text-green-600' : 'text-red-600'
|
|
@@ -834,7 +663,7 @@ export default function MLScanPage() {
|
|
| 834 |
<div className="space-y-2">
|
| 835 |
<div>
|
| 836 |
<div className="flex items-center justify-between text-sm mb-1">
|
| 837 |
-
<span className="text-gray-600">
|
| 838 |
<span className="font-semibold">{stock.confidence}%</span>
|
| 839 |
</div>
|
| 840 |
<div className="w-full bg-gray-200 rounded-full h-2">
|
|
@@ -848,7 +677,7 @@ export default function MLScanPage() {
|
|
| 848 |
{/* Data Quality Badge */}
|
| 849 |
{stock.data_quality && (
|
| 850 |
<div className="flex items-center gap-2 text-xs">
|
| 851 |
-
<span className="text-gray-600">
|
| 852 |
<span className={`font-semibold ${
|
| 853 |
stock.data_quality.score >= 0.8 ? 'text-green-600' :
|
| 854 |
stock.data_quality.score >= 0.6 ? 'text-yellow-600' : 'text-red-600'
|
|
@@ -898,7 +727,7 @@ export default function MLScanPage() {
|
|
| 898 |
{/* Technical gates */}
|
| 899 |
{stock.gates && (
|
| 900 |
<div className="pt-2 border-t">
|
| 901 |
-
<p className="text-xs text-gray-600 mb-1">
|
| 902 |
<div className="flex flex-wrap gap-1" title={(stock.gates.reasons || []).join('\n')}>
|
| 903 |
<span className={`text-xs px-2 py-0.5 rounded-full ${stock.gates.trend_ok ? 'bg-green-100 text-green-700' : 'bg-red-100 text-red-700'}`}>
|
| 904 |
Trend
|
|
@@ -907,16 +736,16 @@ export default function MLScanPage() {
|
|
| 907 |
Momentum
|
| 908 |
</span>
|
| 909 |
<span className={`text-xs px-2 py-0.5 rounded-full ${stock.gates.volume_ok ? 'bg-green-100 text-green-700' : 'bg-red-100 text-red-700'}`}>
|
| 910 |
-
|
| 911 |
</span>
|
| 912 |
<span className={`text-xs px-2 py-0.5 rounded-full ${stock.gates.volatility_ok ? 'bg-green-100 text-green-700' : 'bg-red-100 text-red-700'}`}>
|
| 913 |
-
|
| 914 |
</span>
|
| 915 |
</div>
|
| 916 |
|
| 917 |
{(stock.gates.reasons || []).includes('backend_endpoint_missing:scan-signals') && (
|
| 918 |
<p className="mt-2 text-xs text-amber-700">
|
| 919 |
-
|
| 920 |
</p>
|
| 921 |
)}
|
| 922 |
</div>
|
|
@@ -924,11 +753,11 @@ export default function MLScanPage() {
|
|
| 924 |
|
| 925 |
<div className="grid grid-cols-2 gap-2 pt-2 border-t">
|
| 926 |
<div>
|
| 927 |
-
<p className="text-xs text-gray-600">
|
| 928 |
<p className="font-semibold text-sm">{formatScorePct(stock.trend_score)}</p>
|
| 929 |
</div>
|
| 930 |
<div>
|
| 931 |
-
<p className="text-xs text-gray-600">
|
| 932 |
<p className="font-semibold text-sm">{formatScorePct(stock.momentum_score)}</p>
|
| 933 |
</div>
|
| 934 |
</div>
|
|
@@ -1044,21 +873,21 @@ export default function MLScanPage() {
|
|
| 1044 |
<div className="pt-2 border-t text-xs space-y-1">
|
| 1045 |
{stock.prediction_7d !== undefined && stock.prediction_7d !== null && (
|
| 1046 |
<div className="flex justify-between">
|
| 1047 |
-
<span className="text-gray-600">
|
| 1048 |
<span className={`font-semibold ${
|
| 1049 |
(stock.prediction_7d ?? 0) > (stock.current_price ?? 0) ? 'text-green-600' : 'text-red-600'
|
| 1050 |
}`}>
|
| 1051 |
-
{
|
| 1052 |
</span>
|
| 1053 |
</div>
|
| 1054 |
)}
|
| 1055 |
{stock.prediction_30d !== undefined && stock.prediction_30d !== null && (
|
| 1056 |
<div className="flex justify-between">
|
| 1057 |
-
<span className="text-gray-600">
|
| 1058 |
<span className={`font-semibold ${
|
| 1059 |
(stock.prediction_30d ?? 0) > (stock.current_price ?? 0) ? 'text-green-600' : 'text-red-600'
|
| 1060 |
}`}>
|
| 1061 |
-
{
|
| 1062 |
</span>
|
| 1063 |
</div>
|
| 1064 |
)}
|
|
|
|
| 6 |
|
| 7 |
import { fetchJson } from '@/lib/http';
|
| 8 |
import { logger } from '@/lib/logger';
|
|
|
|
| 9 |
|
| 10 |
// Force dynamic rendering
|
| 11 |
export const dynamic = 'force-dynamic';
|
|
|
|
| 73 |
models_used?: number;
|
| 74 |
}
|
| 75 |
|
| 76 |
+
const MARKET_UNIVERSE: Record<'BIST 30' | 'BIST 50' | 'BIST 100' | 'Tüm BIST', { name: 'bist30' | 'bist50' | 'bist100' | 'all'; expected: number }> = {
|
| 77 |
'BIST 30': { name: 'bist30', expected: 30 },
|
| 78 |
'BIST 50': { name: 'bist50', expected: 50 },
|
| 79 |
'BIST 100': { name: 'bist100', expected: 100 },
|
| 80 |
'Tüm BIST': { name: 'all', expected: 500 },
|
| 81 |
};
|
| 82 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 83 |
function chunkArray<T>(arr: T[], size: number): T[][] {
|
| 84 |
const out: T[][] = [];
|
| 85 |
const n = Math.max(1, Math.floor(size));
|
|
|
|
| 107 |
const [loading, setLoading] = useState(false);
|
| 108 |
const [filter, setFilter] = useState<'all' | 'BUY' | 'SELL' | 'HOLD'>('all');
|
| 109 |
const [minConfidence, setMinConfidence] = useState(10);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 110 |
|
| 111 |
// Gelişmiş Ayarlar
|
| 112 |
const [showAdvanced, setShowAdvanced] = useState(false);
|
| 113 |
+
const [marketSelection, setMarketSelection] = useState<'BIST 30' | 'BIST 50' | 'BIST 100' | 'Tüm BIST' | 'Popüler'>('Popüler');
|
| 114 |
const [modelType, setModelType] = useState<'ensemble' | 'xgboost' | 'lightgbm' | 'rf'>('ensemble');
|
| 115 |
const [predictionDays, setPredictionDays] = useState(7);
|
| 116 |
const [scanStatus, setScanStatus] = useState<string>('');
|
|
|
|
| 146 |
|
| 147 |
const handleScan = async () => {
|
| 148 |
setLoading(true);
|
| 149 |
+
setScanStatus('Hisseler alınıyor...');
|
| 150 |
|
| 151 |
try {
|
| 152 |
// Piyasa seçimine göre hisse listesi al
|
| 153 |
let symbols: string[] = [];
|
| 154 |
|
| 155 |
+
if (marketSelection === 'Popüler') {
|
| 156 |
+
const stocksData = await fetchJson<Record<string, unknown>>(`/api/popular-stocks`, { method: 'GET' }, { timeoutMs: 12000, retries: 1 });
|
| 157 |
+
symbols = Array.isArray(stocksData.stocks) ? stocksData.stocks as string[] : [];
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 158 |
} else {
|
| 159 |
const uni = MARKET_UNIVERSE[marketSelection];
|
| 160 |
+
setScanStatus(`${marketSelection} listesi alınıyor (resmi kaynak)...`);
|
|
|
|
| 161 |
const universeData = await fetchJson<Record<string, unknown>>(
|
| 162 |
`/api/universe?name=${encodeURIComponent(uni.name)}`,
|
| 163 |
{ method: 'GET' },
|
|
|
|
| 165 |
);
|
| 166 |
symbols = Array.isArray(universeData?.symbols) ? universeData.symbols : [];
|
| 167 |
|
| 168 |
+
// Safety: if backend cannot provide a real list, fail loudly (no toy fallbacks).
|
| 169 |
if (symbols.length < Math.floor(uni.expected * 0.9)) {
|
| 170 |
+
throw new Error(`${marketSelection} listesi alınamadı (beklenen ~${uni.expected}, gelen ${symbols.length}). Backend /api/universe çalışmıyor olabilir.`);
|
|
|
|
|
|
|
| 171 |
}
|
| 172 |
}
|
| 173 |
|
| 174 |
if (symbols.length === 0) {
|
| 175 |
+
throw new Error('Taranacak hisse bulunamadı');
|
| 176 |
}
|
| 177 |
|
| 178 |
+
setScanStatus(`${symbols.length} hisse için ML projeksiyonu + teknik tarama başlıyor (${modelType.toUpperCase()})...`);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 179 |
|
| 180 |
const batchSize = symbols.length >= 80 ? 10 : symbols.length >= 40 ? 15 : symbols.length;
|
| 181 |
const batches = chunkArray(symbols, batchSize);
|
|
|
|
| 197 |
symbols: batch,
|
| 198 |
days_ahead: predictionDays,
|
| 199 |
model: modelType,
|
|
|
|
| 200 |
},
|
| 201 |
}
|
| 202 |
);
|
|
|
|
| 360 |
setScanStatus(
|
| 361 |
`✅ Tarama tamamlandı! ${mappedResults.length} hisse analiz edildi, filtre sonrası ${listedNow} hisse listeleniyor.`
|
| 362 |
);
|
|
|
|
| 363 |
} catch (error) {
|
| 364 |
console.error('ML scan failed:', error);
|
| 365 |
+
const errorMessage = error instanceof Error ? error.message : 'Bilinmeyen hata';
|
| 366 |
+
setScanStatus(`❌ Hata: ${errorMessage}`);
|
| 367 |
} finally {
|
| 368 |
setLoading(false);
|
| 369 |
}
|
|
|
|
| 388 |
<div className="mb-6">
|
| 389 |
<div className="flex items-center gap-3 mb-2">
|
| 390 |
<Zap className="w-8 h-8 text-purple-600" />
|
| 391 |
+
<h1 className="text-3xl font-bold text-gray-900">ML Projeksiyonu + Teknik Tarama</h1>
|
| 392 |
{apiHealth === 'healthy' && (
|
| 393 |
<span className="flex items-center gap-1 text-xs text-green-600 bg-green-50 px-2 py-1 rounded-full">
|
| 394 |
<CheckCircle className="w-3 h-3" /> API Aktif
|
|
|
|
| 401 |
)}
|
| 402 |
</div>
|
| 403 |
<p className="text-gray-600">
|
| 404 |
+
ML projeksiyonu (deneysel) ile teknik taramayı birleştirir. <strong>AL</strong> sinyali:
|
|
|
|
|
|
|
| 405 |
ML yönü BUY ise teknik kapılar geçmese bile (HOLD dahil) görünebilir.
|
| 406 |
+
Çakışma (ML BUY ↔ Teknik SELL) durumunda HOLD verilir.
|
|
|
|
| 407 |
</p>
|
| 408 |
</div>
|
| 409 |
|
| 410 |
{/* Model Bilgi Kartı */}
|
|
|
|
| 411 |
<div className="bg-gradient-to-r from-purple-50 to-blue-50 border border-purple-200 rounded-lg p-4 mb-6">
|
| 412 |
<h3 className="font-semibold text-purple-800 mb-2">🆕 Gelişmiş ML Modelleri</h3>
|
| 413 |
<div className="grid grid-cols-2 md:grid-cols-4 gap-4 text-sm">
|
|
|
|
| 429 |
</div>
|
| 430 |
</div>
|
| 431 |
</div>
|
|
|
|
| 432 |
|
| 433 |
{/* Control Panel */}
|
| 434 |
<div className="bg-white p-6 rounded-lg shadow-sm mb-6">
|
|
|
|
| 442 |
className="bg-purple-600 text-white px-6 py-3 rounded-md hover:bg-purple-700 disabled:opacity-50 font-semibold flex items-center gap-2"
|
| 443 |
>
|
| 444 |
{loading ? <RefreshCw className="w-5 h-5 animate-spin" /> : <Zap className="w-5 h-5" />}
|
| 445 |
+
{loading ? 'Taranıyor...' : 'Tarama Başlat'}
|
| 446 |
</button>
|
| 447 |
|
| 448 |
<button
|
|
|
|
| 450 |
className="flex items-center gap-2 px-4 py-2 border border-gray-300 rounded-md hover:bg-gray-50 text-sm"
|
| 451 |
>
|
| 452 |
<Settings className="w-4 h-4" />
|
| 453 |
+
Gelişmiş Ayarlar
|
| 454 |
</button>
|
| 455 |
|
| 456 |
{results.length > 0 && (
|
| 457 |
<p className="text-gray-600 text-sm">
|
| 458 |
+
{filteredResults.length} / {results.length} hisse
|
| 459 |
</p>
|
| 460 |
)}
|
| 461 |
</div>
|
|
|
|
| 468 |
onChange={(e) => setFilter(e.target.value as 'all' | 'BUY' | 'SELL' | 'HOLD')}
|
| 469 |
className="px-3 py-2 border border-gray-300 rounded-md text-sm"
|
| 470 |
>
|
| 471 |
+
<option value="all">Tüm Öneriler</option>
|
| 472 |
+
<option value="BUY">Sadece AL</option>
|
| 473 |
+
<option value="SELL">Sadece SAT</option>
|
| 474 |
+
<option value="HOLD">Sadece TUT</option>
|
| 475 |
</select>
|
| 476 |
</div>
|
| 477 |
|
| 478 |
<div className="flex items-center gap-2">
|
| 479 |
+
<span className="text-sm text-gray-600">Min Güven:</span>
|
| 480 |
<input
|
| 481 |
type="number"
|
| 482 |
min="0"
|
|
|
|
| 493 |
{/* Gelişmiş Ayarlar Paneli */}
|
| 494 |
{showAdvanced && (
|
| 495 |
<div className="border-t pt-4 mt-2">
|
| 496 |
+
<h4 className="font-medium text-gray-700 mb-3">⚙️ Tarama Parametreleri</h4>
|
| 497 |
<div className="grid grid-cols-1 md:grid-cols-4 gap-4">
|
| 498 |
<div>
|
| 499 |
+
<label htmlFor="ml-market" className="block text-sm font-medium text-gray-600 mb-1">Piyasa Seçimi</label>
|
| 500 |
<select
|
| 501 |
id="ml-market"
|
| 502 |
value={marketSelection}
|
| 503 |
+
onChange={(e) => setMarketSelection(e.target.value as typeof marketSelection)}
|
| 504 |
className="w-full px-3 py-2 border border-gray-300 rounded-md text-sm"
|
| 505 |
>
|
| 506 |
+
<option value="Popüler">Popüler Hisseler</option>
|
| 507 |
+
<option value="BIST 30">BIST 30</option>
|
| 508 |
+
<option value="BIST 50">BIST 50</option>
|
| 509 |
+
<option value="BIST 100">BIST 100</option>
|
| 510 |
+
<option value="Tüm BIST">Tüm BIST Hisseleri</option>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 511 |
</select>
|
| 512 |
</div>
|
| 513 |
|
|
|
|
| 514 |
<div>
|
| 515 |
<label htmlFor="ml-model" className="block text-sm font-medium text-gray-600 mb-1">ML Model</label>
|
| 516 |
<select
|
|
|
|
| 525 |
<option value="rf">RandomForest</option>
|
| 526 |
</select>
|
| 527 |
</div>
|
|
|
|
| 528 |
|
|
|
|
| 529 |
<div>
|
| 530 |
<label htmlFor="ml-days" className="block text-sm font-medium text-gray-600 mb-1">Tahmin Süresi</label>
|
| 531 |
<select
|
|
|
|
| 542 |
<option value={30}>30 Gün (1 Ay)</option>
|
| 543 |
</select>
|
| 544 |
</div>
|
|
|
|
| 545 |
|
|
|
|
| 546 |
<div className="flex items-end">
|
| 547 |
<div className="text-xs text-gray-500 bg-gray-50 p-2 rounded">
|
| 548 |
<strong>Ensemble:</strong> XGBoost + LightGBM + RF modellerinin ortalaması
|
| 549 |
</div>
|
| 550 |
</div>
|
|
|
|
| 551 |
</div>
|
| 552 |
</div>
|
| 553 |
)}
|
|
|
|
| 569 |
{results.length > 0 && (
|
| 570 |
<div className="grid grid-cols-1 md:grid-cols-4 gap-4 mb-6">
|
| 571 |
<div className="bg-white p-4 rounded-lg shadow-sm">
|
| 572 |
+
<p className="text-sm text-gray-600">Listelenen</p>
|
| 573 |
<p className="text-2xl font-bold text-gray-900">{filteredResults.length}</p>
|
| 574 |
</div>
|
| 575 |
<div className="bg-green-50 p-4 rounded-lg shadow-sm">
|
| 576 |
+
<p className="text-sm text-green-700">AL Önerisi</p>
|
| 577 |
<p className="text-2xl font-bold text-green-800">{buyCount}</p>
|
| 578 |
</div>
|
| 579 |
<div className="bg-yellow-50 p-4 rounded-lg shadow-sm">
|
| 580 |
+
<p className="text-sm text-yellow-700">TUT Önerisi</p>
|
| 581 |
<p className="text-2xl font-bold text-yellow-800">{holdCount}</p>
|
| 582 |
</div>
|
| 583 |
<div className="bg-red-50 p-4 rounded-lg shadow-sm">
|
| 584 |
+
<p className="text-sm text-red-700">SAT Önerisi</p>
|
| 585 |
<p className="text-2xl font-bold text-red-800">{sellCount}</p>
|
| 586 |
</div>
|
| 587 |
</div>
|
|
|
|
| 591 |
{loading && (
|
| 592 |
<div className="bg-white rounded-lg shadow-sm p-12 text-center">
|
| 593 |
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-purple-600 mx-auto mb-4"></div>
|
| 594 |
+
<p className="text-gray-600">Hisseler ML + teknik sinyaller ile taranıyor...</p>
|
| 595 |
+
<p className="text-sm text-gray-500 mt-2">Bu işlem bir kaç dakika sürebilir</p>
|
| 596 |
</div>
|
| 597 |
)}
|
| 598 |
|
| 599 |
{!loading && results.length === 0 && (
|
| 600 |
<div className="bg-white rounded-lg shadow-sm p-12 text-center">
|
| 601 |
<Zap className="w-16 h-16 text-gray-400 mx-auto mb-4" />
|
| 602 |
+
<h3 className="text-lg font-semibold text-gray-900 mb-2">Toplu Tarama</h3>
|
| 603 |
<p className="text-gray-600">
|
| 604 |
+
BIST hisselerini ML projeksiyonu + teknik kapılar ile taramak için butona tıklayın
|
|
|
|
|
|
|
|
|
|
| 605 |
</p>
|
| 606 |
</div>
|
| 607 |
)}
|
|
|
|
| 644 |
|
| 645 |
<div className="flex items-center justify-between mb-3">
|
| 646 |
<div>
|
| 647 |
+
<p className="text-sm text-gray-600">Fiyat</p>
|
| 648 |
+
<p className="font-semibold">₺{(stock.current_price ?? 0).toFixed(2)}</p>
|
| 649 |
</div>
|
| 650 |
<div className="text-right">
|
| 651 |
+
<p className="text-sm text-gray-600">Değişim</p>
|
| 652 |
<p
|
| 653 |
className={`font-semibold ${
|
| 654 |
(stock.change_percent ?? 0) >= 0 ? 'text-green-600' : 'text-red-600'
|
|
|
|
| 663 |
<div className="space-y-2">
|
| 664 |
<div>
|
| 665 |
<div className="flex items-center justify-between text-sm mb-1">
|
| 666 |
+
<span className="text-gray-600">Güven Skoru</span>
|
| 667 |
<span className="font-semibold">{stock.confidence}%</span>
|
| 668 |
</div>
|
| 669 |
<div className="w-full bg-gray-200 rounded-full h-2">
|
|
|
|
| 677 |
{/* Data Quality Badge */}
|
| 678 |
{stock.data_quality && (
|
| 679 |
<div className="flex items-center gap-2 text-xs">
|
| 680 |
+
<span className="text-gray-600">Veri Kalitesi:</span>
|
| 681 |
<span className={`font-semibold ${
|
| 682 |
stock.data_quality.score >= 0.8 ? 'text-green-600' :
|
| 683 |
stock.data_quality.score >= 0.6 ? 'text-yellow-600' : 'text-red-600'
|
|
|
|
| 727 |
{/* Technical gates */}
|
| 728 |
{stock.gates && (
|
| 729 |
<div className="pt-2 border-t">
|
| 730 |
+
<p className="text-xs text-gray-600 mb-1">Kapılar (AL için gerekli):</p>
|
| 731 |
<div className="flex flex-wrap gap-1" title={(stock.gates.reasons || []).join('\n')}>
|
| 732 |
<span className={`text-xs px-2 py-0.5 rounded-full ${stock.gates.trend_ok ? 'bg-green-100 text-green-700' : 'bg-red-100 text-red-700'}`}>
|
| 733 |
Trend
|
|
|
|
| 736 |
Momentum
|
| 737 |
</span>
|
| 738 |
<span className={`text-xs px-2 py-0.5 rounded-full ${stock.gates.volume_ok ? 'bg-green-100 text-green-700' : 'bg-red-100 text-red-700'}`}>
|
| 739 |
+
Hacim
|
| 740 |
</span>
|
| 741 |
<span className={`text-xs px-2 py-0.5 rounded-full ${stock.gates.volatility_ok ? 'bg-green-100 text-green-700' : 'bg-red-100 text-red-700'}`}>
|
| 742 |
+
Volatilite
|
| 743 |
</span>
|
| 744 |
</div>
|
| 745 |
|
| 746 |
{(stock.gates.reasons || []).includes('backend_endpoint_missing:scan-signals') && (
|
| 747 |
<p className="mt-2 text-xs text-amber-700">
|
| 748 |
+
Teknik skorlar şu an alınamıyor (backend <span className="font-mono">/api/scan-signals</span> yok).
|
| 749 |
</p>
|
| 750 |
)}
|
| 751 |
</div>
|
|
|
|
| 753 |
|
| 754 |
<div className="grid grid-cols-2 gap-2 pt-2 border-t">
|
| 755 |
<div>
|
| 756 |
+
<p className="text-xs text-gray-600">Teknik Trend</p>
|
| 757 |
<p className="font-semibold text-sm">{formatScorePct(stock.trend_score)}</p>
|
| 758 |
</div>
|
| 759 |
<div>
|
| 760 |
+
<p className="text-xs text-gray-600">Teknik Momentum</p>
|
| 761 |
<p className="font-semibold text-sm">{formatScorePct(stock.momentum_score)}</p>
|
| 762 |
</div>
|
| 763 |
</div>
|
|
|
|
| 873 |
<div className="pt-2 border-t text-xs space-y-1">
|
| 874 |
{stock.prediction_7d !== undefined && stock.prediction_7d !== null && (
|
| 875 |
<div className="flex justify-between">
|
| 876 |
+
<span className="text-gray-600">7g tahmini:</span>
|
| 877 |
<span className={`font-semibold ${
|
| 878 |
(stock.prediction_7d ?? 0) > (stock.current_price ?? 0) ? 'text-green-600' : 'text-red-600'
|
| 879 |
}`}>
|
| 880 |
+
₺{(stock.prediction_7d ?? 0).toFixed(2)}
|
| 881 |
</span>
|
| 882 |
</div>
|
| 883 |
)}
|
| 884 |
{stock.prediction_30d !== undefined && stock.prediction_30d !== null && (
|
| 885 |
<div className="flex justify-between">
|
| 886 |
+
<span className="text-gray-600">30g tahmini:</span>
|
| 887 |
<span className={`font-semibold ${
|
| 888 |
(stock.prediction_30d ?? 0) > (stock.current_price ?? 0) ? 'text-green-600' : 'text-red-600'
|
| 889 |
}`}>
|
| 890 |
+
₺{(stock.prediction_30d ?? 0).toFixed(2)}
|
| 891 |
</span>
|
| 892 |
</div>
|
| 893 |
)}
|
huggingface-space/nextjs-app/src/app/page.tsx
CHANGED
|
@@ -5,7 +5,7 @@ import TopMLPredictions from '@/components/TopMLPredictions'
|
|
| 5 |
import Link from 'next/link'
|
| 6 |
import {
|
| 7 |
TrendingUp, BarChart3, Brain, Zap, Target, Search,
|
| 8 |
-
Activity, Newspaper, Building2, Briefcase, ArrowRight
|
| 9 |
} from 'lucide-react'
|
| 10 |
|
| 11 |
export default function Home() {
|
|
@@ -59,12 +59,6 @@ export default function Home() {
|
|
| 59 |
title="BIST100"
|
| 60 |
color="indigo"
|
| 61 |
/>
|
| 62 |
-
<QuickAccessCard
|
| 63 |
-
href="/us-market"
|
| 64 |
-
icon={<Globe className="h-5 w-5" />}
|
| 65 |
-
title="US Equities"
|
| 66 |
-
color="teal"
|
| 67 |
-
/>
|
| 68 |
</div>
|
| 69 |
|
| 70 |
<MarketOverview />
|
|
@@ -165,7 +159,6 @@ function QuickAccessCard({
|
|
| 165 |
pink: 'bg-pink-50 text-pink-600 hover:bg-pink-100 border-pink-200',
|
| 166 |
orange: 'bg-orange-50 text-orange-600 hover:bg-orange-100 border-orange-200',
|
| 167 |
indigo: 'bg-indigo-50 text-indigo-600 hover:bg-indigo-100 border-indigo-200',
|
| 168 |
-
teal: 'bg-teal-50 text-teal-600 hover:bg-teal-100 border-teal-200',
|
| 169 |
}
|
| 170 |
|
| 171 |
return (
|
|
|
|
| 5 |
import Link from 'next/link'
|
| 6 |
import {
|
| 7 |
TrendingUp, BarChart3, Brain, Zap, Target, Search,
|
| 8 |
+
Activity, Newspaper, Building2, Briefcase, ArrowRight
|
| 9 |
} from 'lucide-react'
|
| 10 |
|
| 11 |
export default function Home() {
|
|
|
|
| 59 |
title="BIST100"
|
| 60 |
color="indigo"
|
| 61 |
/>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 62 |
</div>
|
| 63 |
|
| 64 |
<MarketOverview />
|
|
|
|
| 159 |
pink: 'bg-pink-50 text-pink-600 hover:bg-pink-100 border-pink-200',
|
| 160 |
orange: 'bg-orange-50 text-orange-600 hover:bg-orange-100 border-orange-200',
|
| 161 |
indigo: 'bg-indigo-50 text-indigo-600 hover:bg-indigo-100 border-indigo-200',
|
|
|
|
| 162 |
}
|
| 163 |
|
| 164 |
return (
|
huggingface-space/nextjs-app/src/app/stocks/page.tsx
CHANGED
|
@@ -7,7 +7,6 @@ import Link from 'next/link';
|
|
| 7 |
import { fetchJson } from '@/lib/http';
|
| 8 |
import { appendAnalysisRecord } from '@/lib/analysis-history';
|
| 9 |
import { useAuth } from '@/contexts/AuthContext';
|
| 10 |
-
import { useMarket } from '@/contexts/MarketContext';
|
| 11 |
import { logger } from '@/lib/logger';
|
| 12 |
|
| 13 |
|
|
@@ -68,7 +67,6 @@ interface StockAnalysis {
|
|
| 68 |
|
| 69 |
export default function StocksPage() {
|
| 70 |
const { user } = useAuth();
|
| 71 |
-
const { market } = useMarket();
|
| 72 |
const [symbol, setSymbol] = useState('');
|
| 73 |
const [analysis, setAnalysis] = useState<StockAnalysis | null>(null);
|
| 74 |
const [loading, setLoading] = useState(false);
|
|
@@ -77,28 +75,25 @@ export default function StocksPage() {
|
|
| 77 |
const [period, setPeriod] = useState('6mo');
|
| 78 |
const [quickSymbols, setQuickSymbols] = useState<string[]>([]);
|
| 79 |
|
| 80 |
-
const isUS = market === 'us';
|
| 81 |
-
const currencySymbol = isUS ? '$' : '₺';
|
| 82 |
-
const universeName = isUS ? 'us_popular' : 'bist30';
|
| 83 |
-
|
| 84 |
useEffect(() => {
|
| 85 |
let mounted = true;
|
| 86 |
-
fetchJson<Record<string, unknown>>(`/api/universe?name=
|
| 87 |
.then((data) => {
|
| 88 |
const symbols = Array.isArray(data?.symbols) ? data.symbols : []
|
| 89 |
if (mounted) setQuickSymbols(symbols.slice(0, 10))
|
| 90 |
})
|
| 91 |
.catch(() => {
|
|
|
|
| 92 |
if (mounted) setQuickSymbols([])
|
| 93 |
})
|
| 94 |
return () => {
|
| 95 |
mounted = false
|
| 96 |
}
|
| 97 |
-
}, [
|
| 98 |
|
| 99 |
const handleAnalyze = async () => {
|
| 100 |
if (!symbol.trim()) {
|
| 101 |
-
setError(
|
| 102 |
return;
|
| 103 |
}
|
| 104 |
|
|
@@ -116,14 +111,14 @@ export default function StocksPage() {
|
|
| 116 |
|
| 117 |
// Hisse verisini al (gerçek kaynak: /api/stock-data)
|
| 118 |
const stockRaw = await fetchJson<Record<string, unknown>>(
|
| 119 |
-
`/api/stock-data?symbol=${encodeURIComponent(sym)}&period=${encodeURIComponent(period)}&interval=1d
|
| 120 |
{ method: 'GET' },
|
| 121 |
{ timeoutMs: 20000, retries: 1 }
|
| 122 |
);
|
| 123 |
|
| 124 |
// Teknik göstergeleri al (best-effort)
|
| 125 |
const techRaw = await fetchJson<Record<string, unknown>>(
|
| 126 |
-
`/api/technical-analysis?symbol=${encodeURIComponent(sym)}
|
| 127 |
{ method: 'GET' },
|
| 128 |
{ timeoutMs: 20000, retries: 1 }
|
| 129 |
).catch(() => null);
|
|
@@ -141,7 +136,6 @@ export default function StocksPage() {
|
|
| 141 |
symbols: [symbol.toUpperCase()],
|
| 142 |
days_ahead: predictionDays,
|
| 143 |
model: 'ensemble',
|
| 144 |
-
market,
|
| 145 |
},
|
| 146 |
}
|
| 147 |
);
|
|
@@ -179,29 +173,26 @@ export default function StocksPage() {
|
|
| 179 |
let techSignal: 'BUY' | 'SELL' | 'HOLD' = 'HOLD'
|
| 180 |
try {
|
| 181 |
const scanResp = await fetchJson<Record<string, unknown>>(
|
| 182 |
-
|
| 183 |
{ method: 'POST' },
|
| 184 |
{
|
| 185 |
timeoutMs: 30000,
|
| 186 |
retries: 0,
|
| 187 |
jsonBody: {
|
| 188 |
symbols: [sym],
|
| 189 |
-
|
|
|
|
|
|
|
| 190 |
},
|
| 191 |
}
|
| 192 |
)
|
| 193 |
-
const items
|
| 194 |
-
? ((isUS ? scanResp.signals : scanResp.data) as Record<string, unknown>[])
|
| 195 |
-
: []
|
| 196 |
const item = items.find((x: Record<string, unknown>) => String(x?.symbol || '').toUpperCase() === sym)
|
| 197 |
-
|
| 198 |
-
|
| 199 |
-
requiredOk = String(item?.signal || '').toUpperCase() === 'BUY'
|
| 200 |
-
} else if (gates && typeof gates.required_ok === 'boolean') {
|
| 201 |
-
requiredOk = gates.required_ok as boolean
|
| 202 |
}
|
| 203 |
-
if (item?.technical_signal
|
| 204 |
-
const ts = String(item.technical_signal
|
| 205 |
if (ts === 'BUY' || ts === 'SELL') techSignal = ts
|
| 206 |
}
|
| 207 |
} catch (e) {
|
|
@@ -337,7 +328,7 @@ export default function StocksPage() {
|
|
| 337 |
setAnalysis(analysisResult);
|
| 338 |
} catch (err) {
|
| 339 |
console.error('Analysis error:', err);
|
| 340 |
-
setError(err instanceof Error ? err.message :
|
| 341 |
} finally {
|
| 342 |
setLoading(false);
|
| 343 |
}
|
|
@@ -363,12 +354,10 @@ export default function StocksPage() {
|
|
| 363 |
<div className="mb-6">
|
| 364 |
<div className="flex items-center gap-3 mb-2">
|
| 365 |
<BarChart3 className="w-8 h-8 text-blue-600" />
|
| 366 |
-
<h1 className="text-3xl font-bold text-gray-900">
|
| 367 |
-
{isUS ? 'Stock Technical Analysis' : 'Hisse Senedi Teknik Analizi'}
|
| 368 |
-
</h1>
|
| 369 |
</div>
|
| 370 |
<p className="text-gray-600">
|
| 371 |
-
|
| 372 |
</p>
|
| 373 |
</div>
|
| 374 |
|
|
@@ -376,9 +365,7 @@ export default function StocksPage() {
|
|
| 376 |
<div className="bg-white rounded-lg shadow-sm p-6 mb-6">
|
| 377 |
<div className="grid grid-cols-1 md:grid-cols-5 gap-4">
|
| 378 |
<div className="md:col-span-2">
|
| 379 |
-
<label htmlFor="stock-symbol" className="block text-sm font-medium text-gray-700 mb-1">
|
| 380 |
-
{isUS ? 'Stock Symbol' : 'Hisse Kodu'}
|
| 381 |
-
</label>
|
| 382 |
<div className="relative">
|
| 383 |
<input
|
| 384 |
id="stock-symbol"
|
|
@@ -386,29 +373,28 @@ export default function StocksPage() {
|
|
| 386 |
value={symbol}
|
| 387 |
onChange={(e) => setSymbol(e.target.value.toUpperCase())}
|
| 388 |
onKeyPress={(e) => e.key === 'Enter' && handleAnalyze()}
|
| 389 |
-
placeholder=
|
| 390 |
className="w-full px-4 py-2 border border-gray-300 rounded-md focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
| 391 |
/>
|
| 392 |
</div>
|
| 393 |
</div>
|
| 394 |
|
| 395 |
<div>
|
| 396 |
-
<label htmlFor="stock-period" className="block text-sm font-medium text-gray-700 mb-1">
|
| 397 |
<select
|
| 398 |
id="stock-period"
|
| 399 |
value={period}
|
| 400 |
onChange={(e) => setPeriod(e.target.value)}
|
| 401 |
className="w-full px-3 py-2 border border-gray-300 rounded-md"
|
| 402 |
>
|
| 403 |
-
<option value="1mo">
|
| 404 |
-
<option value="3mo">
|
| 405 |
-
<option value="6mo">
|
| 406 |
-
<option value="1y">
|
| 407 |
-
<option value="2y">
|
| 408 |
</select>
|
| 409 |
</div>
|
| 410 |
|
| 411 |
-
{!isUS && (
|
| 412 |
<div>
|
| 413 |
<label htmlFor="stock-prediction" className="block text-sm font-medium text-gray-700 mb-1">Tahmin Süresi</label>
|
| 414 |
<select
|
|
@@ -425,7 +411,6 @@ export default function StocksPage() {
|
|
| 425 |
<option value={30}>30 Gün</option>
|
| 426 |
</select>
|
| 427 |
</div>
|
| 428 |
-
)}
|
| 429 |
|
| 430 |
<div className="flex items-end">
|
| 431 |
<button
|
|
@@ -436,12 +421,12 @@ export default function StocksPage() {
|
|
| 436 |
{loading ? (
|
| 437 |
<>
|
| 438 |
<div className="animate-spin rounded-full h-4 w-4 border-b-2 border-white"></div>
|
| 439 |
-
|
| 440 |
</>
|
| 441 |
) : (
|
| 442 |
<>
|
| 443 |
<Search className="w-4 h-4" />
|
| 444 |
-
|
| 445 |
</>
|
| 446 |
)}
|
| 447 |
</button>
|
|
@@ -460,18 +445,14 @@ export default function StocksPage() {
|
|
| 460 |
{!analysis && !loading && (
|
| 461 |
<div className="bg-white rounded-lg shadow-sm p-12 text-center">
|
| 462 |
<BarChart3 className="w-16 h-16 text-gray-400 mx-auto mb-4" />
|
| 463 |
-
<h3 className="text-lg font-semibold text-gray-900 mb-2">
|
| 464 |
-
{isUS ? 'Stock Analysis' : 'Hisse Analizi'}
|
| 465 |
-
</h3>
|
| 466 |
<p className="text-gray-600 mb-4">
|
| 467 |
-
|
| 468 |
</p>
|
| 469 |
|
| 470 |
{/* Hızlı Erişim */}
|
| 471 |
<div className="mt-6">
|
| 472 |
-
<p className="text-sm text-gray-500 mb-3">
|
| 473 |
-
{isUS ? 'Popular Stocks:' : 'Popüler Hisseler:'}
|
| 474 |
-
</p>
|
| 475 |
<div className="flex flex-wrap justify-center gap-2">
|
| 476 |
{quickSymbols.map((s) => (
|
| 477 |
<button
|
|
@@ -487,7 +468,7 @@ export default function StocksPage() {
|
|
| 487 |
))}
|
| 488 |
</div>
|
| 489 |
{quickSymbols.length === 0 && (
|
| 490 |
-
<p className="text-xs text-gray-400 mt-2">
|
| 491 |
)}
|
| 492 |
</div>
|
| 493 |
</div>
|
|
@@ -496,8 +477,8 @@ export default function StocksPage() {
|
|
| 496 |
{loading && (
|
| 497 |
<div className="bg-white rounded-lg shadow-sm p-12 text-center">
|
| 498 |
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-blue-600 mx-auto mb-4"></div>
|
| 499 |
-
<p className="text-gray-600">{symbol}
|
| 500 |
-
<p className="text-sm text-gray-500 mt-2">
|
| 501 |
</div>
|
| 502 |
)}
|
| 503 |
|
|
@@ -521,13 +502,13 @@ export default function StocksPage() {
|
|
| 521 |
analysis.risk_level === 'high' ? 'bg-red-50 text-red-700' :
|
| 522 |
'bg-yellow-50 text-yellow-700'
|
| 523 |
}`}>
|
| 524 |
-
Risk: {analysis.risk_level === 'low' ?
|
| 525 |
</span>
|
| 526 |
</div>
|
| 527 |
<p className="text-gray-600">{analysis.company_name}</p>
|
| 528 |
</div>
|
| 529 |
<div className="text-right">
|
| 530 |
-
<p className="text-3xl font-bold text-gray-900">{
|
| 531 |
<p className={`text-lg font-semibold ${
|
| 532 |
(analysis.change_percent ?? 0) >= 0 ? 'text-green-600' : 'text-red-600'
|
| 533 |
}`}>
|
|
@@ -539,29 +520,29 @@ export default function StocksPage() {
|
|
| 539 |
{/* Temel Metrikler */}
|
| 540 |
<div className="grid grid-cols-2 md:grid-cols-6 gap-4">
|
| 541 |
<div className="bg-gray-50 p-3 rounded-lg">
|
| 542 |
-
<p className="text-xs text-gray-500">
|
| 543 |
-
<p className="text-lg font-semibold">{
|
| 544 |
</div>
|
| 545 |
<div className="bg-gray-50 p-3 rounded-lg">
|
| 546 |
-
<p className="text-xs text-gray-500">
|
| 547 |
-
<p className="text-lg font-semibold">{
|
| 548 |
</div>
|
| 549 |
<div className="bg-gray-50 p-3 rounded-lg">
|
| 550 |
-
<p className="text-xs text-gray-500">
|
| 551 |
<p className="text-lg font-semibold">{((analysis.volume ?? 0) / 1000000).toFixed(2)}M</p>
|
| 552 |
</div>
|
| 553 |
<div className="bg-gray-50 p-3 rounded-lg">
|
| 554 |
-
<p className="text-xs text-gray-500">
|
| 555 |
<p className="text-lg font-semibold">{(analysis.volatility ?? 0).toFixed(2)}%</p>
|
| 556 |
</div>
|
| 557 |
{analysis.pe_ratio && (
|
| 558 |
<div className="bg-gray-50 p-3 rounded-lg">
|
| 559 |
-
<p className="text-xs text-gray-500">
|
| 560 |
<p className="text-lg font-semibold">{(analysis.pe_ratio ?? 0).toFixed(2)}</p>
|
| 561 |
</div>
|
| 562 |
)}
|
| 563 |
<div className="bg-gray-50 p-3 rounded-lg">
|
| 564 |
-
<p className="text-xs text-gray-500">
|
| 565 |
<p className="text-lg font-semibold">{typeof analysis.signals?.signal_strength === 'number' ? `${analysis.signals.signal_strength.toFixed(0)}%` : '—'}</p>
|
| 566 |
</div>
|
| 567 |
</div>
|
|
@@ -569,24 +550,19 @@ export default function StocksPage() {
|
|
| 569 |
|
| 570 |
{/* ML Tahmin ve Teknik Göstergeler */}
|
| 571 |
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
| 572 |
-
{/* ML
|
| 573 |
<div className="bg-gradient-to-br from-purple-50 to-blue-50 rounded-lg p-6 border border-purple-200">
|
| 574 |
<div className="flex items-center gap-2 mb-4">
|
| 575 |
<Zap className="w-5 h-5 text-purple-600" />
|
| 576 |
-
<h3 className="font-semibold text-gray-900">
|
| 577 |
</div>
|
| 578 |
|
| 579 |
-
{
|
| 580 |
-
<div className="text-center py-4">
|
| 581 |
-
<p className="text-gray-500 text-sm">ML predictions are not available for US stocks.</p>
|
| 582 |
-
<p className="text-gray-400 text-xs mt-1">Use the ML Scanner for US signal scoring.</p>
|
| 583 |
-
</div>
|
| 584 |
-
) : analysis.ml_prediction ? (
|
| 585 |
<div className="space-y-4">
|
| 586 |
<div className="flex items-center justify-between">
|
| 587 |
<span className="text-gray-600">Tahmin Edilen Fiyat:</span>
|
| 588 |
<span className="text-xl font-bold text-gray-900">
|
| 589 |
-
{
|
| 590 |
</span>
|
| 591 |
</div>
|
| 592 |
<div className="flex items-center justify-between">
|
|
@@ -625,7 +601,7 @@ export default function StocksPage() {
|
|
| 625 |
</div>
|
| 626 |
</div>
|
| 627 |
) : (
|
| 628 |
-
<p className="text-gray-500">
|
| 629 |
)}
|
| 630 |
</div>
|
| 631 |
|
|
@@ -633,48 +609,48 @@ export default function StocksPage() {
|
|
| 633 |
<div className="bg-white rounded-lg shadow-sm p-6">
|
| 634 |
<div className="flex items-center gap-2 mb-4">
|
| 635 |
<TrendingUp className="w-5 h-5 text-blue-600" />
|
| 636 |
-
<h3 className="font-semibold text-gray-900">
|
| 637 |
</div>
|
| 638 |
|
| 639 |
<div className="space-y-4">
|
| 640 |
<div className="flex items-center justify-between">
|
| 641 |
-
<span className="text-gray-600">
|
| 642 |
<div className="flex items-center gap-2">
|
| 643 |
{getTrendIcon(analysis.signals.trend_short)}
|
| 644 |
<span className={`font-semibold ${
|
| 645 |
analysis.signals.trend_short === 'up' ? 'text-green-600' :
|
| 646 |
analysis.signals.trend_short === 'down' ? 'text-red-600' : 'text-yellow-600'
|
| 647 |
}`}>
|
| 648 |
-
{analysis.signals.trend_short === 'up' ?
|
| 649 |
-
analysis.signals.trend_short === 'down' ?
|
| 650 |
</span>
|
| 651 |
</div>
|
| 652 |
</div>
|
| 653 |
|
| 654 |
<div className="flex items-center justify-between">
|
| 655 |
-
<span className="text-gray-600">
|
| 656 |
<div className="flex items-center gap-2">
|
| 657 |
{getTrendIcon(analysis.signals.trend_medium)}
|
| 658 |
<span className={`font-semibold ${
|
| 659 |
analysis.signals.trend_medium === 'up' ? 'text-green-600' :
|
| 660 |
analysis.signals.trend_medium === 'down' ? 'text-red-600' : 'text-yellow-600'
|
| 661 |
}`}>
|
| 662 |
-
{analysis.signals.trend_medium === 'up' ?
|
| 663 |
-
analysis.signals.trend_medium === 'down' ?
|
| 664 |
</span>
|
| 665 |
</div>
|
| 666 |
</div>
|
| 667 |
|
| 668 |
<div className="flex items-center justify-between">
|
| 669 |
-
<span className="text-gray-600">
|
| 670 |
<div className="flex items-center gap-2">
|
| 671 |
{getTrendIcon(analysis.signals.trend_long)}
|
| 672 |
<span className={`font-semibold ${
|
| 673 |
analysis.signals.trend_long === 'up' ? 'text-green-600' :
|
| 674 |
analysis.signals.trend_long === 'down' ? 'text-red-600' : 'text-yellow-600'
|
| 675 |
}`}>
|
| 676 |
-
{analysis.signals.trend_long === 'up' ?
|
| 677 |
-
analysis.signals.trend_long === 'down' ?
|
| 678 |
</span>
|
| 679 |
</div>
|
| 680 |
</div>
|
|
@@ -684,7 +660,7 @@ export default function StocksPage() {
|
|
| 684 |
|
| 685 |
{/* Teknik Göstergeler Detay */}
|
| 686 |
<div className="bg-white rounded-lg shadow-sm p-6">
|
| 687 |
-
<h3 className="font-semibold text-gray-900 mb-4">
|
| 688 |
|
| 689 |
<div className="grid grid-cols-2 md:grid-cols-4 lg:grid-cols-6 gap-4">
|
| 690 |
<div className="p-3 rounded-lg border">
|
|
@@ -693,8 +669,8 @@ export default function StocksPage() {
|
|
| 693 |
{typeof analysis.indicators?.rsi === 'number' ? analysis.indicators.rsi.toFixed(1) : '—'}
|
| 694 |
</p>
|
| 695 |
<p className="text-xs text-gray-500">
|
| 696 |
-
{analysis.signals?.rsi_signal === 'oversold' ?
|
| 697 |
-
analysis.signals?.rsi_signal === 'overbought' ?
|
| 698 |
</p>
|
| 699 |
</div>
|
| 700 |
|
|
@@ -706,18 +682,18 @@ export default function StocksPage() {
|
|
| 706 |
{typeof analysis.indicators?.macd === 'number' ? analysis.indicators.macd.toFixed(3) : '—'}
|
| 707 |
</p>
|
| 708 |
<p className="text-xs text-gray-500">
|
| 709 |
-
{analysis.signals?.macd_signal === 'bullish' ?
|
| 710 |
</p>
|
| 711 |
</div>
|
| 712 |
|
| 713 |
<div className="p-3 rounded-lg border">
|
| 714 |
<p className="text-xs text-gray-500 mb-1">SMA 20</p>
|
| 715 |
-
<p className="text-lg font-bold">{typeof analysis.indicators?.sma_20 === 'number' ? `${
|
| 716 |
</div>
|
| 717 |
|
| 718 |
<div className="p-3 rounded-lg border">
|
| 719 |
<p className="text-xs text-gray-500 mb-1">SMA 50</p>
|
| 720 |
-
<p className="text-lg font-bold">{typeof analysis.indicators?.sma_50 === 'number' ? `${
|
| 721 |
</div>
|
| 722 |
|
| 723 |
<div className="p-3 rounded-lg border">
|
|
@@ -729,7 +705,7 @@ export default function StocksPage() {
|
|
| 729 |
<p className="text-xs text-gray-500 mb-1">ADX</p>
|
| 730 |
<p className="text-lg font-bold">{typeof analysis.indicators?.adx === 'number' ? analysis.indicators.adx.toFixed(1) : '—'}</p>
|
| 731 |
<p className="text-xs text-gray-500">
|
| 732 |
-
{typeof analysis.indicators?.adx === 'number' ? (analysis.indicators.adx > 25 ?
|
| 733 |
</p>
|
| 734 |
</div>
|
| 735 |
</div>
|
|
@@ -739,11 +715,11 @@ export default function StocksPage() {
|
|
| 739 |
typeof analysis.indicators?.bollinger_middle === 'number' &&
|
| 740 |
typeof analysis.indicators?.bollinger_upper === 'number' && (
|
| 741 |
<div className="mt-4 p-4 bg-gray-50 rounded-lg">
|
| 742 |
-
<p className="text-sm font-medium text-gray-700 mb-2">
|
| 743 |
<div className="flex items-center justify-between text-sm">
|
| 744 |
-
<span className="text-red-600">
|
| 745 |
-
<span className="text-gray-600">
|
| 746 |
-
<span className="text-green-600">
|
| 747 |
</div>
|
| 748 |
<div className="mt-2 h-2 bg-gray-200 rounded-full relative">
|
| 749 |
<div
|
|
@@ -772,7 +748,7 @@ export default function StocksPage() {
|
|
| 772 |
className="inline-flex items-center gap-2 bg-blue-600 text-white px-6 py-3 rounded-lg hover:bg-blue-700 font-semibold"
|
| 773 |
>
|
| 774 |
<BarChart3 className="w-5 h-5" />
|
| 775 |
-
|
| 776 |
</Link>
|
| 777 |
</div>
|
| 778 |
</div>
|
|
|
|
| 7 |
import { fetchJson } from '@/lib/http';
|
| 8 |
import { appendAnalysisRecord } from '@/lib/analysis-history';
|
| 9 |
import { useAuth } from '@/contexts/AuthContext';
|
|
|
|
| 10 |
import { logger } from '@/lib/logger';
|
| 11 |
|
| 12 |
|
|
|
|
| 67 |
|
| 68 |
export default function StocksPage() {
|
| 69 |
const { user } = useAuth();
|
|
|
|
| 70 |
const [symbol, setSymbol] = useState('');
|
| 71 |
const [analysis, setAnalysis] = useState<StockAnalysis | null>(null);
|
| 72 |
const [loading, setLoading] = useState(false);
|
|
|
|
| 75 |
const [period, setPeriod] = useState('6mo');
|
| 76 |
const [quickSymbols, setQuickSymbols] = useState<string[]>([]);
|
| 77 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 78 |
useEffect(() => {
|
| 79 |
let mounted = true;
|
| 80 |
+
fetchJson<Record<string, unknown>>(`/api/universe?name=bist30`, { method: 'GET' }, { timeoutMs: 20000, retries: 1 })
|
| 81 |
.then((data) => {
|
| 82 |
const symbols = Array.isArray(data?.symbols) ? data.symbols : []
|
| 83 |
if (mounted) setQuickSymbols(symbols.slice(0, 10))
|
| 84 |
})
|
| 85 |
.catch(() => {
|
| 86 |
+
// No toy fallback
|
| 87 |
if (mounted) setQuickSymbols([])
|
| 88 |
})
|
| 89 |
return () => {
|
| 90 |
mounted = false
|
| 91 |
}
|
| 92 |
+
}, [])
|
| 93 |
|
| 94 |
const handleAnalyze = async () => {
|
| 95 |
if (!symbol.trim()) {
|
| 96 |
+
setError('Lütfen hisse kodu girin');
|
| 97 |
return;
|
| 98 |
}
|
| 99 |
|
|
|
|
| 111 |
|
| 112 |
// Hisse verisini al (gerçek kaynak: /api/stock-data)
|
| 113 |
const stockRaw = await fetchJson<Record<string, unknown>>(
|
| 114 |
+
`/api/stock-data?symbol=${encodeURIComponent(sym)}&period=${encodeURIComponent(period)}&interval=1d`,
|
| 115 |
{ method: 'GET' },
|
| 116 |
{ timeoutMs: 20000, retries: 1 }
|
| 117 |
);
|
| 118 |
|
| 119 |
// Teknik göstergeleri al (best-effort)
|
| 120 |
const techRaw = await fetchJson<Record<string, unknown>>(
|
| 121 |
+
`/api/technical-analysis?symbol=${encodeURIComponent(sym)}`,
|
| 122 |
{ method: 'GET' },
|
| 123 |
{ timeoutMs: 20000, retries: 1 }
|
| 124 |
).catch(() => null);
|
|
|
|
| 136 |
symbols: [symbol.toUpperCase()],
|
| 137 |
days_ahead: predictionDays,
|
| 138 |
model: 'ensemble',
|
|
|
|
| 139 |
},
|
| 140 |
}
|
| 141 |
);
|
|
|
|
| 173 |
let techSignal: 'BUY' | 'SELL' | 'HOLD' = 'HOLD'
|
| 174 |
try {
|
| 175 |
const scanResp = await fetchJson<Record<string, unknown>>(
|
| 176 |
+
`/api/scan-signals`,
|
| 177 |
{ method: 'POST' },
|
| 178 |
{
|
| 179 |
timeoutMs: 30000,
|
| 180 |
retries: 0,
|
| 181 |
jsonBody: {
|
| 182 |
symbols: [sym],
|
| 183 |
+
period,
|
| 184 |
+
interval: '1d',
|
| 185 |
+
limit: 1,
|
| 186 |
},
|
| 187 |
}
|
| 188 |
)
|
| 189 |
+
const items = Array.isArray(scanResp?.data) ? scanResp.data : []
|
|
|
|
|
|
|
| 190 |
const item = items.find((x: Record<string, unknown>) => String(x?.symbol || '').toUpperCase() === sym)
|
| 191 |
+
if (item?.gates && typeof item.gates.required_ok === 'boolean') {
|
| 192 |
+
requiredOk = item.gates.required_ok
|
|
|
|
|
|
|
|
|
|
| 193 |
}
|
| 194 |
+
if (item?.technical_signal) {
|
| 195 |
+
const ts = String(item.technical_signal).toUpperCase().trim()
|
| 196 |
if (ts === 'BUY' || ts === 'SELL') techSignal = ts
|
| 197 |
}
|
| 198 |
} catch (e) {
|
|
|
|
| 328 |
setAnalysis(analysisResult);
|
| 329 |
} catch (err) {
|
| 330 |
console.error('Analysis error:', err);
|
| 331 |
+
setError(err instanceof Error ? err.message : 'Analiz yapılırken hata oluştu');
|
| 332 |
} finally {
|
| 333 |
setLoading(false);
|
| 334 |
}
|
|
|
|
| 354 |
<div className="mb-6">
|
| 355 |
<div className="flex items-center gap-3 mb-2">
|
| 356 |
<BarChart3 className="w-8 h-8 text-blue-600" />
|
| 357 |
+
<h1 className="text-3xl font-bold text-gray-900">Hisse Senedi Teknik Analizi</h1>
|
|
|
|
|
|
|
| 358 |
</div>
|
| 359 |
<p className="text-gray-600">
|
| 360 |
+
Hisse senedi detaylı teknik analizi, göstergeler ve ML tahminleri
|
| 361 |
</p>
|
| 362 |
</div>
|
| 363 |
|
|
|
|
| 365 |
<div className="bg-white rounded-lg shadow-sm p-6 mb-6">
|
| 366 |
<div className="grid grid-cols-1 md:grid-cols-5 gap-4">
|
| 367 |
<div className="md:col-span-2">
|
| 368 |
+
<label htmlFor="stock-symbol" className="block text-sm font-medium text-gray-700 mb-1">Hisse Kodu</label>
|
|
|
|
|
|
|
| 369 |
<div className="relative">
|
| 370 |
<input
|
| 371 |
id="stock-symbol"
|
|
|
|
| 373 |
value={symbol}
|
| 374 |
onChange={(e) => setSymbol(e.target.value.toUpperCase())}
|
| 375 |
onKeyPress={(e) => e.key === 'Enter' && handleAnalyze()}
|
| 376 |
+
placeholder="Örn: THYAO, GARAN, ASELS"
|
| 377 |
className="w-full px-4 py-2 border border-gray-300 rounded-md focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
| 378 |
/>
|
| 379 |
</div>
|
| 380 |
</div>
|
| 381 |
|
| 382 |
<div>
|
| 383 |
+
<label htmlFor="stock-period" className="block text-sm font-medium text-gray-700 mb-1">Veri Periyodu</label>
|
| 384 |
<select
|
| 385 |
id="stock-period"
|
| 386 |
value={period}
|
| 387 |
onChange={(e) => setPeriod(e.target.value)}
|
| 388 |
className="w-full px-3 py-2 border border-gray-300 rounded-md"
|
| 389 |
>
|
| 390 |
+
<option value="1mo">1 Ay</option>
|
| 391 |
+
<option value="3mo">3 Ay</option>
|
| 392 |
+
<option value="6mo">6 Ay</option>
|
| 393 |
+
<option value="1y">1 Yıl</option>
|
| 394 |
+
<option value="2y">2 Yıl</option>
|
| 395 |
</select>
|
| 396 |
</div>
|
| 397 |
|
|
|
|
| 398 |
<div>
|
| 399 |
<label htmlFor="stock-prediction" className="block text-sm font-medium text-gray-700 mb-1">Tahmin Süresi</label>
|
| 400 |
<select
|
|
|
|
| 411 |
<option value={30}>30 Gün</option>
|
| 412 |
</select>
|
| 413 |
</div>
|
|
|
|
| 414 |
|
| 415 |
<div className="flex items-end">
|
| 416 |
<button
|
|
|
|
| 421 |
{loading ? (
|
| 422 |
<>
|
| 423 |
<div className="animate-spin rounded-full h-4 w-4 border-b-2 border-white"></div>
|
| 424 |
+
Analiz Ediliyor...
|
| 425 |
</>
|
| 426 |
) : (
|
| 427 |
<>
|
| 428 |
<Search className="w-4 h-4" />
|
| 429 |
+
Analiz Et
|
| 430 |
</>
|
| 431 |
)}
|
| 432 |
</button>
|
|
|
|
| 445 |
{!analysis && !loading && (
|
| 446 |
<div className="bg-white rounded-lg shadow-sm p-12 text-center">
|
| 447 |
<BarChart3 className="w-16 h-16 text-gray-400 mx-auto mb-4" />
|
| 448 |
+
<h3 className="text-lg font-semibold text-gray-900 mb-2">Hisse Analizi</h3>
|
|
|
|
|
|
|
| 449 |
<p className="text-gray-600 mb-4">
|
| 450 |
+
Hisse kodunu girerek detaylı teknik analiz ve ML tahminlerini görüntüleyin
|
| 451 |
</p>
|
| 452 |
|
| 453 |
{/* Hızlı Erişim */}
|
| 454 |
<div className="mt-6">
|
| 455 |
+
<p className="text-sm text-gray-500 mb-3">Popüler Hisseler:</p>
|
|
|
|
|
|
|
| 456 |
<div className="flex flex-wrap justify-center gap-2">
|
| 457 |
{quickSymbols.map((s) => (
|
| 458 |
<button
|
|
|
|
| 468 |
))}
|
| 469 |
</div>
|
| 470 |
{quickSymbols.length === 0 && (
|
| 471 |
+
<p className="text-xs text-gray-400 mt-2">Hızlı liste şu an alınamadı.</p>
|
| 472 |
)}
|
| 473 |
</div>
|
| 474 |
</div>
|
|
|
|
| 477 |
{loading && (
|
| 478 |
<div className="bg-white rounded-lg shadow-sm p-12 text-center">
|
| 479 |
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-blue-600 mx-auto mb-4"></div>
|
| 480 |
+
<p className="text-gray-600">{symbol} analiz ediliyor...</p>
|
| 481 |
+
<p className="text-sm text-gray-500 mt-2">Teknik göstergeler hesaplanıyor ve ML tahminleri alınıyor...</p>
|
| 482 |
</div>
|
| 483 |
)}
|
| 484 |
|
|
|
|
| 502 |
analysis.risk_level === 'high' ? 'bg-red-50 text-red-700' :
|
| 503 |
'bg-yellow-50 text-yellow-700'
|
| 504 |
}`}>
|
| 505 |
+
Risk: {analysis.risk_level === 'low' ? 'Düşük' : analysis.risk_level === 'high' ? 'Yüksek' : 'Orta'}
|
| 506 |
</span>
|
| 507 |
</div>
|
| 508 |
<p className="text-gray-600">{analysis.company_name}</p>
|
| 509 |
</div>
|
| 510 |
<div className="text-right">
|
| 511 |
+
<p className="text-3xl font-bold text-gray-900">₺{(analysis.current_price ?? 0).toFixed(2)}</p>
|
| 512 |
<p className={`text-lg font-semibold ${
|
| 513 |
(analysis.change_percent ?? 0) >= 0 ? 'text-green-600' : 'text-red-600'
|
| 514 |
}`}>
|
|
|
|
| 520 |
{/* Temel Metrikler */}
|
| 521 |
<div className="grid grid-cols-2 md:grid-cols-6 gap-4">
|
| 522 |
<div className="bg-gray-50 p-3 rounded-lg">
|
| 523 |
+
<p className="text-xs text-gray-500">52H Yüksek</p>
|
| 524 |
+
<p className="text-lg font-semibold">₺{(analysis.high_52w ?? 0).toFixed(2)}</p>
|
| 525 |
</div>
|
| 526 |
<div className="bg-gray-50 p-3 rounded-lg">
|
| 527 |
+
<p className="text-xs text-gray-500">52H Düşük</p>
|
| 528 |
+
<p className="text-lg font-semibold">₺{(analysis.low_52w ?? 0).toFixed(2)}</p>
|
| 529 |
</div>
|
| 530 |
<div className="bg-gray-50 p-3 rounded-lg">
|
| 531 |
+
<p className="text-xs text-gray-500">Hacim</p>
|
| 532 |
<p className="text-lg font-semibold">{((analysis.volume ?? 0) / 1000000).toFixed(2)}M</p>
|
| 533 |
</div>
|
| 534 |
<div className="bg-gray-50 p-3 rounded-lg">
|
| 535 |
+
<p className="text-xs text-gray-500">Volatilite</p>
|
| 536 |
<p className="text-lg font-semibold">{(analysis.volatility ?? 0).toFixed(2)}%</p>
|
| 537 |
</div>
|
| 538 |
{analysis.pe_ratio && (
|
| 539 |
<div className="bg-gray-50 p-3 rounded-lg">
|
| 540 |
+
<p className="text-xs text-gray-500">F/K Oranı</p>
|
| 541 |
<p className="text-lg font-semibold">{(analysis.pe_ratio ?? 0).toFixed(2)}</p>
|
| 542 |
</div>
|
| 543 |
)}
|
| 544 |
<div className="bg-gray-50 p-3 rounded-lg">
|
| 545 |
+
<p className="text-xs text-gray-500">Sinyal Gücü</p>
|
| 546 |
<p className="text-lg font-semibold">{typeof analysis.signals?.signal_strength === 'number' ? `${analysis.signals.signal_strength.toFixed(0)}%` : '—'}</p>
|
| 547 |
</div>
|
| 548 |
</div>
|
|
|
|
| 550 |
|
| 551 |
{/* ML Tahmin ve Teknik Göstergeler */}
|
| 552 |
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
| 553 |
+
{/* ML Tahmin */}
|
| 554 |
<div className="bg-gradient-to-br from-purple-50 to-blue-50 rounded-lg p-6 border border-purple-200">
|
| 555 |
<div className="flex items-center gap-2 mb-4">
|
| 556 |
<Zap className="w-5 h-5 text-purple-600" />
|
| 557 |
+
<h3 className="font-semibold text-gray-900">ML Tahmin ({predictionDays} Gün)</h3>
|
| 558 |
</div>
|
| 559 |
|
| 560 |
+
{analysis.ml_prediction ? (
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 561 |
<div className="space-y-4">
|
| 562 |
<div className="flex items-center justify-between">
|
| 563 |
<span className="text-gray-600">Tahmin Edilen Fiyat:</span>
|
| 564 |
<span className="text-xl font-bold text-gray-900">
|
| 565 |
+
₺{(analysis.ml_prediction.predicted_price ?? 0).toFixed(2)}
|
| 566 |
</span>
|
| 567 |
</div>
|
| 568 |
<div className="flex items-center justify-between">
|
|
|
|
| 601 |
</div>
|
| 602 |
</div>
|
| 603 |
) : (
|
| 604 |
+
<p className="text-gray-500">ML tahmini alınamadı</p>
|
| 605 |
)}
|
| 606 |
</div>
|
| 607 |
|
|
|
|
| 609 |
<div className="bg-white rounded-lg shadow-sm p-6">
|
| 610 |
<div className="flex items-center gap-2 mb-4">
|
| 611 |
<TrendingUp className="w-5 h-5 text-blue-600" />
|
| 612 |
+
<h3 className="font-semibold text-gray-900">Trend Analizi</h3>
|
| 613 |
</div>
|
| 614 |
|
| 615 |
<div className="space-y-4">
|
| 616 |
<div className="flex items-center justify-between">
|
| 617 |
+
<span className="text-gray-600">Kısa Vadeli (SMA20):</span>
|
| 618 |
<div className="flex items-center gap-2">
|
| 619 |
{getTrendIcon(analysis.signals.trend_short)}
|
| 620 |
<span className={`font-semibold ${
|
| 621 |
analysis.signals.trend_short === 'up' ? 'text-green-600' :
|
| 622 |
analysis.signals.trend_short === 'down' ? 'text-red-600' : 'text-yellow-600'
|
| 623 |
}`}>
|
| 624 |
+
{analysis.signals.trend_short === 'up' ? 'Yükseliş' :
|
| 625 |
+
analysis.signals.trend_short === 'down' ? 'Düşüş' : 'Yatay'}
|
| 626 |
</span>
|
| 627 |
</div>
|
| 628 |
</div>
|
| 629 |
|
| 630 |
<div className="flex items-center justify-between">
|
| 631 |
+
<span className="text-gray-600">Orta Vadeli (SMA50):</span>
|
| 632 |
<div className="flex items-center gap-2">
|
| 633 |
{getTrendIcon(analysis.signals.trend_medium)}
|
| 634 |
<span className={`font-semibold ${
|
| 635 |
analysis.signals.trend_medium === 'up' ? 'text-green-600' :
|
| 636 |
analysis.signals.trend_medium === 'down' ? 'text-red-600' : 'text-yellow-600'
|
| 637 |
}`}>
|
| 638 |
+
{analysis.signals.trend_medium === 'up' ? 'Yükseliş' :
|
| 639 |
+
analysis.signals.trend_medium === 'down' ? 'Düşüş' : 'Yatay'}
|
| 640 |
</span>
|
| 641 |
</div>
|
| 642 |
</div>
|
| 643 |
|
| 644 |
<div className="flex items-center justify-between">
|
| 645 |
+
<span className="text-gray-600">Uzun Vadeli (SMA200):</span>
|
| 646 |
<div className="flex items-center gap-2">
|
| 647 |
{getTrendIcon(analysis.signals.trend_long)}
|
| 648 |
<span className={`font-semibold ${
|
| 649 |
analysis.signals.trend_long === 'up' ? 'text-green-600' :
|
| 650 |
analysis.signals.trend_long === 'down' ? 'text-red-600' : 'text-yellow-600'
|
| 651 |
}`}>
|
| 652 |
+
{analysis.signals.trend_long === 'up' ? 'Yükseliş' :
|
| 653 |
+
analysis.signals.trend_long === 'down' ? 'Düşüş' : 'Yatay'}
|
| 654 |
</span>
|
| 655 |
</div>
|
| 656 |
</div>
|
|
|
|
| 660 |
|
| 661 |
{/* Teknik Göstergeler Detay */}
|
| 662 |
<div className="bg-white rounded-lg shadow-sm p-6">
|
| 663 |
+
<h3 className="font-semibold text-gray-900 mb-4">Teknik Göstergeler</h3>
|
| 664 |
|
| 665 |
<div className="grid grid-cols-2 md:grid-cols-4 lg:grid-cols-6 gap-4">
|
| 666 |
<div className="p-3 rounded-lg border">
|
|
|
|
| 669 |
{typeof analysis.indicators?.rsi === 'number' ? analysis.indicators.rsi.toFixed(1) : '—'}
|
| 670 |
</p>
|
| 671 |
<p className="text-xs text-gray-500">
|
| 672 |
+
{analysis.signals?.rsi_signal === 'oversold' ? 'Aşırı Satım' :
|
| 673 |
+
analysis.signals?.rsi_signal === 'overbought' ? 'Aşırı Alım' : 'Nötr'}
|
| 674 |
</p>
|
| 675 |
</div>
|
| 676 |
|
|
|
|
| 682 |
{typeof analysis.indicators?.macd === 'number' ? analysis.indicators.macd.toFixed(3) : '—'}
|
| 683 |
</p>
|
| 684 |
<p className="text-xs text-gray-500">
|
| 685 |
+
{analysis.signals?.macd_signal === 'bullish' ? 'Yükseliş' : analysis.signals?.macd_signal === 'bearish' ? 'Düşüş' : 'Nötr'}
|
| 686 |
</p>
|
| 687 |
</div>
|
| 688 |
|
| 689 |
<div className="p-3 rounded-lg border">
|
| 690 |
<p className="text-xs text-gray-500 mb-1">SMA 20</p>
|
| 691 |
+
<p className="text-lg font-bold">{typeof analysis.indicators?.sma_20 === 'number' ? `₺${analysis.indicators.sma_20.toFixed(2)}` : '—'}</p>
|
| 692 |
</div>
|
| 693 |
|
| 694 |
<div className="p-3 rounded-lg border">
|
| 695 |
<p className="text-xs text-gray-500 mb-1">SMA 50</p>
|
| 696 |
+
<p className="text-lg font-bold">{typeof analysis.indicators?.sma_50 === 'number' ? `₺${analysis.indicators.sma_50.toFixed(2)}` : '—'}</p>
|
| 697 |
</div>
|
| 698 |
|
| 699 |
<div className="p-3 rounded-lg border">
|
|
|
|
| 705 |
<p className="text-xs text-gray-500 mb-1">ADX</p>
|
| 706 |
<p className="text-lg font-bold">{typeof analysis.indicators?.adx === 'number' ? analysis.indicators.adx.toFixed(1) : '—'}</p>
|
| 707 |
<p className="text-xs text-gray-500">
|
| 708 |
+
{typeof analysis.indicators?.adx === 'number' ? (analysis.indicators.adx > 25 ? 'Güçlü Trend' : 'Zayıf Trend') : '—'}
|
| 709 |
</p>
|
| 710 |
</div>
|
| 711 |
</div>
|
|
|
|
| 715 |
typeof analysis.indicators?.bollinger_middle === 'number' &&
|
| 716 |
typeof analysis.indicators?.bollinger_upper === 'number' && (
|
| 717 |
<div className="mt-4 p-4 bg-gray-50 rounded-lg">
|
| 718 |
+
<p className="text-sm font-medium text-gray-700 mb-2">Bollinger Bantları</p>
|
| 719 |
<div className="flex items-center justify-between text-sm">
|
| 720 |
+
<span className="text-red-600">Alt: ₺{analysis.indicators.bollinger_lower.toFixed(2)}</span>
|
| 721 |
+
<span className="text-gray-600">Orta: ₺{analysis.indicators.bollinger_middle.toFixed(2)}</span>
|
| 722 |
+
<span className="text-green-600">Üst: ₺{analysis.indicators.bollinger_upper.toFixed(2)}</span>
|
| 723 |
</div>
|
| 724 |
<div className="mt-2 h-2 bg-gray-200 rounded-full relative">
|
| 725 |
<div
|
|
|
|
| 748 |
className="inline-flex items-center gap-2 bg-blue-600 text-white px-6 py-3 rounded-lg hover:bg-blue-700 font-semibold"
|
| 749 |
>
|
| 750 |
<BarChart3 className="w-5 h-5" />
|
| 751 |
+
Detaylı Grafik ve Fiyat Geçmişi
|
| 752 |
</Link>
|
| 753 |
</div>
|
| 754 |
</div>
|
huggingface-space/nextjs-app/src/app/us-market/page.tsx
DELETED
|
@@ -1,556 +0,0 @@
|
|
| 1 |
-
'use client'
|
| 2 |
-
|
| 3 |
-
import { useState, useEffect, useCallback } from 'react'
|
| 4 |
-
import { fetchJson } from '@/lib/http'
|
| 5 |
-
import Link from 'next/link'
|
| 6 |
-
import {
|
| 7 |
-
TrendingUp,
|
| 8 |
-
TrendingDown,
|
| 9 |
-
DollarSign,
|
| 10 |
-
BarChart3,
|
| 11 |
-
Activity,
|
| 12 |
-
Target,
|
| 13 |
-
Clock,
|
| 14 |
-
RefreshCw,
|
| 15 |
-
Zap,
|
| 16 |
-
ChevronDown,
|
| 17 |
-
ChevronUp,
|
| 18 |
-
Bot,
|
| 19 |
-
Globe,
|
| 20 |
-
ArrowRight,
|
| 21 |
-
} from 'lucide-react'
|
| 22 |
-
|
| 23 |
-
// ─── Types ─────────────────────────────────
|
| 24 |
-
interface Portfolio {
|
| 25 |
-
cash: number
|
| 26 |
-
equity: number
|
| 27 |
-
positionCount: number
|
| 28 |
-
positions: Array<{
|
| 29 |
-
symbol: string
|
| 30 |
-
quantity: number
|
| 31 |
-
avgCost: number
|
| 32 |
-
entryDate: string
|
| 33 |
-
notional: number
|
| 34 |
-
}>
|
| 35 |
-
pnlPct: number
|
| 36 |
-
unrealizedPnl: number
|
| 37 |
-
}
|
| 38 |
-
|
| 39 |
-
interface EquityPoint {
|
| 40 |
-
date: string
|
| 41 |
-
equity: number
|
| 42 |
-
cash: number
|
| 43 |
-
positions: number
|
| 44 |
-
realizedPnl: number
|
| 45 |
-
}
|
| 46 |
-
|
| 47 |
-
interface OpenTrade {
|
| 48 |
-
symbol: string
|
| 49 |
-
quantity: number
|
| 50 |
-
entryPrice: number
|
| 51 |
-
entryDate: string
|
| 52 |
-
confidence: number
|
| 53 |
-
predictedReturn: number
|
| 54 |
-
commission: number
|
| 55 |
-
}
|
| 56 |
-
|
| 57 |
-
interface ClosedTrade {
|
| 58 |
-
symbol: string
|
| 59 |
-
quantity: number
|
| 60 |
-
entryPrice: number
|
| 61 |
-
exitPrice: number
|
| 62 |
-
entryDate: string
|
| 63 |
-
exitDate: string
|
| 64 |
-
netPnl: number
|
| 65 |
-
returnPct: number
|
| 66 |
-
holdingDays: number
|
| 67 |
-
exitReason: string
|
| 68 |
-
}
|
| 69 |
-
|
| 70 |
-
interface Performance {
|
| 71 |
-
totalPnl: number
|
| 72 |
-
closedTradesCount: number
|
| 73 |
-
openTradesCount: number
|
| 74 |
-
winRate: number
|
| 75 |
-
profitFactor: number | string
|
| 76 |
-
avgWin: number
|
| 77 |
-
avgLoss: number
|
| 78 |
-
bestTrade: number
|
| 79 |
-
worstTrade: number
|
| 80 |
-
}
|
| 81 |
-
|
| 82 |
-
interface Signal {
|
| 83 |
-
date: string
|
| 84 |
-
symbol: string
|
| 85 |
-
signal: string
|
| 86 |
-
mlSignal: string
|
| 87 |
-
techSignal: string
|
| 88 |
-
confidence: number
|
| 89 |
-
predictedReturn: number
|
| 90 |
-
actionTaken: string
|
| 91 |
-
}
|
| 92 |
-
|
| 93 |
-
interface MarketSummary {
|
| 94 |
-
marketId: string
|
| 95 |
-
displayName: string
|
| 96 |
-
currency: string
|
| 97 |
-
status: {
|
| 98 |
-
isRunning: boolean
|
| 99 |
-
workerRunning: boolean
|
| 100 |
-
workerPid: number | null
|
| 101 |
-
lastRunDate: string | null
|
| 102 |
-
totalDaysRun: number
|
| 103 |
-
totalTrades: number
|
| 104 |
-
currentPhase: string | null
|
| 105 |
-
lastWorkerStatus?: string | null
|
| 106 |
-
lastWorkerReason?: string | null
|
| 107 |
-
}
|
| 108 |
-
portfolio: Portfolio
|
| 109 |
-
scan: {
|
| 110 |
-
universe: string | null
|
| 111 |
-
completed: boolean
|
| 112 |
-
stage1Count: number
|
| 113 |
-
stage2Count: number
|
| 114 |
-
updatedAt: string | null
|
| 115 |
-
}
|
| 116 |
-
openTrades?: OpenTrade[]
|
| 117 |
-
closedTrades?: ClosedTrade[]
|
| 118 |
-
equityCurve?: EquityPoint[]
|
| 119 |
-
signals?: Signal[]
|
| 120 |
-
performance?: Performance
|
| 121 |
-
eligibleStocks: string[]
|
| 122 |
-
}
|
| 123 |
-
|
| 124 |
-
interface TradingApiResponse {
|
| 125 |
-
markets?: Record<string, MarketSummary>
|
| 126 |
-
error?: string
|
| 127 |
-
}
|
| 128 |
-
|
| 129 |
-
// ─── Helpers ───────────────────────────────
|
| 130 |
-
function fmtUSD(n: number | undefined | null): string {
|
| 131 |
-
if (n == null) return '—'
|
| 132 |
-
return new Intl.NumberFormat('en-US', { style: 'currency', currency: 'USD', maximumFractionDigits: 2 }).format(n)
|
| 133 |
-
}
|
| 134 |
-
|
| 135 |
-
function fmtPct(n: number | undefined | null): string {
|
| 136 |
-
if (n == null) return '—'
|
| 137 |
-
return `${n >= 0 ? '+' : ''}${n.toFixed(2)}%`
|
| 138 |
-
}
|
| 139 |
-
|
| 140 |
-
function fmtDate(d: string | undefined | null): string {
|
| 141 |
-
if (!d) return '—'
|
| 142 |
-
const date = new Date(d)
|
| 143 |
-
if (isNaN(date.getTime())) return d
|
| 144 |
-
return date.toLocaleDateString('en-US', { month: 'short', day: 'numeric', year: 'numeric' })
|
| 145 |
-
}
|
| 146 |
-
|
| 147 |
-
function signalBadgeClass(signal: string): string {
|
| 148 |
-
const s = signal?.toUpperCase() ?? ''
|
| 149 |
-
if (s.includes('BUY') || s === 'LONG') return 'bg-green-100 text-green-700 border-green-200'
|
| 150 |
-
if (s.includes('SELL') || s === 'SHORT') return 'bg-red-100 text-red-700 border-red-200'
|
| 151 |
-
return 'bg-gray-100 text-gray-600 border-gray-200'
|
| 152 |
-
}
|
| 153 |
-
|
| 154 |
-
function actionBadgeClass(action: string): string {
|
| 155 |
-
const a = action?.toUpperCase() ?? ''
|
| 156 |
-
if (a.includes('EXECUTED')) return 'bg-blue-100 text-blue-700 border-blue-200'
|
| 157 |
-
if (a.includes('REJECTED') || a.includes('NO_POSITION') || a.includes('SKIP')) return 'bg-amber-100 text-amber-700 border-amber-200'
|
| 158 |
-
return 'bg-gray-100 text-gray-600 border-gray-200'
|
| 159 |
-
}
|
| 160 |
-
|
| 161 |
-
// ─── Sub-components ─────────────────────────
|
| 162 |
-
function StatCard({ label, value, sub, positive }: { label: string; value: string; sub?: string; positive?: boolean }) {
|
| 163 |
-
return (
|
| 164 |
-
<div className="bg-white border border-gray-200 rounded-lg p-4">
|
| 165 |
-
<p className="text-xs text-gray-500 uppercase tracking-wide mb-1">{label}</p>
|
| 166 |
-
<p className={`text-xl font-bold ${positive === true ? 'text-green-600' : positive === false ? 'text-red-600' : 'text-gray-900'}`}>
|
| 167 |
-
{value}
|
| 168 |
-
</p>
|
| 169 |
-
{sub && <p className="text-xs text-gray-500 mt-0.5">{sub}</p>}
|
| 170 |
-
</div>
|
| 171 |
-
)
|
| 172 |
-
}
|
| 173 |
-
|
| 174 |
-
// ─── Main Page ─────────────────────────────
|
| 175 |
-
export default function USMarketPage() {
|
| 176 |
-
const [market, setMarket] = useState<MarketSummary | null>(null)
|
| 177 |
-
const [loading, setLoading] = useState(true)
|
| 178 |
-
const [error, setError] = useState<string | null>(null)
|
| 179 |
-
const [lastRefresh, setLastRefresh] = useState<Date | null>(null)
|
| 180 |
-
const [showEquity, setShowEquity] = useState(false)
|
| 181 |
-
|
| 182 |
-
const load = useCallback(async () => {
|
| 183 |
-
setLoading(true)
|
| 184 |
-
setError(null)
|
| 185 |
-
try {
|
| 186 |
-
const data = await fetchJson<TradingApiResponse>('/api/trading')
|
| 187 |
-
const us = data?.markets?.us ?? null
|
| 188 |
-
if (!us) {
|
| 189 |
-
setError('US market data not available')
|
| 190 |
-
} else {
|
| 191 |
-
setMarket(us)
|
| 192 |
-
setLastRefresh(new Date())
|
| 193 |
-
}
|
| 194 |
-
} catch (e) {
|
| 195 |
-
setError(e instanceof Error ? e.message : 'Failed to load US market data')
|
| 196 |
-
} finally {
|
| 197 |
-
setLoading(false)
|
| 198 |
-
}
|
| 199 |
-
}, [])
|
| 200 |
-
|
| 201 |
-
useEffect(() => { load() }, [load])
|
| 202 |
-
|
| 203 |
-
// ── Loading ──
|
| 204 |
-
if (loading) {
|
| 205 |
-
return (
|
| 206 |
-
<div className="container mx-auto px-4 py-8 max-w-5xl">
|
| 207 |
-
<div className="animate-pulse space-y-4">
|
| 208 |
-
<div className="h-8 bg-gray-200 rounded w-64" />
|
| 209 |
-
<div className="grid grid-cols-2 md:grid-cols-4 gap-4">
|
| 210 |
-
{[...Array(4)].map((_, i) => <div key={i} className="h-24 bg-gray-200 rounded-lg" />)}
|
| 211 |
-
</div>
|
| 212 |
-
<div className="h-48 bg-gray-200 rounded-lg" />
|
| 213 |
-
</div>
|
| 214 |
-
</div>
|
| 215 |
-
)
|
| 216 |
-
}
|
| 217 |
-
|
| 218 |
-
// ── Error ──
|
| 219 |
-
if (error || !market) {
|
| 220 |
-
return (
|
| 221 |
-
<div className="container mx-auto px-4 py-8 max-w-5xl">
|
| 222 |
-
<div className="flex items-center gap-3 mb-6">
|
| 223 |
-
<Globe className="w-7 h-7 text-blue-600" />
|
| 224 |
-
<h1 className="text-2xl font-bold text-gray-900">US Equities</h1>
|
| 225 |
-
</div>
|
| 226 |
-
<div className="bg-red-50 border border-red-200 rounded-lg p-6 text-center">
|
| 227 |
-
<p className="text-red-700 font-medium">{error ?? 'No data'}</p>
|
| 228 |
-
<button onClick={load} className="mt-3 px-4 py-2 bg-red-600 text-white rounded-lg text-sm hover:bg-red-700">
|
| 229 |
-
Retry
|
| 230 |
-
</button>
|
| 231 |
-
</div>
|
| 232 |
-
</div>
|
| 233 |
-
)
|
| 234 |
-
}
|
| 235 |
-
|
| 236 |
-
const perf = market.performance
|
| 237 |
-
const openTrades = market.openTrades ?? []
|
| 238 |
-
const closedTrades = market.closedTrades ?? []
|
| 239 |
-
const signals = market.signals ?? []
|
| 240 |
-
const equityCurve = market.equityCurve ?? []
|
| 241 |
-
const equity = market.portfolio.equity ?? market.portfolio.cash
|
| 242 |
-
const initialCapital = 100000
|
| 243 |
-
const totalReturn = ((equity - initialCapital) / initialCapital) * 100
|
| 244 |
-
|
| 245 |
-
return (
|
| 246 |
-
<div className="container mx-auto px-4 py-6 max-w-5xl">
|
| 247 |
-
|
| 248 |
-
{/* Header */}
|
| 249 |
-
<div className="flex items-center justify-between mb-6 flex-wrap gap-3">
|
| 250 |
-
<div className="flex items-center gap-3">
|
| 251 |
-
<Globe className="w-7 h-7 text-blue-600" />
|
| 252 |
-
<div>
|
| 253 |
-
<h1 className="text-2xl font-bold text-gray-900">US Equities</h1>
|
| 254 |
-
<p className="text-sm text-gray-500">S&P 100 · NYSE · Paper Trading</p>
|
| 255 |
-
</div>
|
| 256 |
-
<span className={`ml-2 px-3 py-1 rounded-full text-xs font-semibold border ${market.status.workerRunning ? 'bg-green-100 text-green-700 border-green-200' : 'bg-gray-100 text-gray-500 border-gray-200'}`}>
|
| 257 |
-
{market.status.workerRunning ? 'Worker Active' : 'Worker Idle'}
|
| 258 |
-
</span>
|
| 259 |
-
</div>
|
| 260 |
-
<div className="flex items-center gap-2">
|
| 261 |
-
{lastRefresh && (
|
| 262 |
-
<span className="text-xs text-gray-400">
|
| 263 |
-
Updated {lastRefresh.toLocaleTimeString('en-US', { hour: '2-digit', minute: '2-digit' })}
|
| 264 |
-
</span>
|
| 265 |
-
)}
|
| 266 |
-
<button
|
| 267 |
-
onClick={load}
|
| 268 |
-
className="flex items-center gap-1.5 px-3 py-1.5 rounded-lg border border-gray-200 bg-white text-gray-600 text-sm hover:bg-gray-50"
|
| 269 |
-
>
|
| 270 |
-
<RefreshCw className="w-3.5 h-3.5" />
|
| 271 |
-
Refresh
|
| 272 |
-
</button>
|
| 273 |
-
<Link
|
| 274 |
-
href="/auto-trading"
|
| 275 |
-
className="flex items-center gap-1.5 px-3 py-1.5 rounded-lg bg-blue-600 text-white text-sm hover:bg-blue-700"
|
| 276 |
-
>
|
| 277 |
-
<Bot className="w-3.5 h-3.5" />
|
| 278 |
-
Trading Panel
|
| 279 |
-
<ArrowRight className="w-3.5 h-3.5" />
|
| 280 |
-
</Link>
|
| 281 |
-
</div>
|
| 282 |
-
</div>
|
| 283 |
-
|
| 284 |
-
{/* Portfolio Stats */}
|
| 285 |
-
<div className="grid grid-cols-2 md:grid-cols-4 gap-4 mb-6">
|
| 286 |
-
<StatCard label="Portfolio Equity" value={fmtUSD(equity)} />
|
| 287 |
-
<StatCard
|
| 288 |
-
label="Total Return"
|
| 289 |
-
value={fmtPct(totalReturn)}
|
| 290 |
-
positive={totalReturn >= 0}
|
| 291 |
-
/>
|
| 292 |
-
<StatCard label="Cash" value={fmtUSD(market.portfolio.cash)} />
|
| 293 |
-
<StatCard label="Open Positions" value={`${market.portfolio.positionCount}`} sub={market.portfolio.positionCount === 1 ? 'position' : 'positions'} />
|
| 294 |
-
</div>
|
| 295 |
-
|
| 296 |
-
{/* Performance row */}
|
| 297 |
-
{perf && (
|
| 298 |
-
<div className="grid grid-cols-2 md:grid-cols-4 gap-4 mb-6">
|
| 299 |
-
<StatCard
|
| 300 |
-
label="Realized P&L"
|
| 301 |
-
value={fmtUSD(perf.totalPnl)}
|
| 302 |
-
positive={perf.totalPnl >= 0}
|
| 303 |
-
/>
|
| 304 |
-
<StatCard label="Closed Trades" value={`${perf.closedTradesCount}`} />
|
| 305 |
-
<StatCard label="Win Rate" value={perf.closedTradesCount > 0 ? `${perf.winRate.toFixed(1)}%` : '—'} />
|
| 306 |
-
<StatCard label="Profit Factor" value={typeof perf.profitFactor === 'number' ? (perf.profitFactor === 0 ? '—' : perf.profitFactor.toFixed(2)) : String(perf.profitFactor)} />
|
| 307 |
-
</div>
|
| 308 |
-
)}
|
| 309 |
-
|
| 310 |
-
{/* Worker Status bar */}
|
| 311 |
-
<div className="bg-white border border-gray-200 rounded-lg px-4 py-3 mb-6 text-sm text-gray-600 flex flex-wrap gap-4">
|
| 312 |
-
<span><span className="font-semibold text-gray-800">Last Run:</span> {fmtDate(market.status.lastRunDate)}</span>
|
| 313 |
-
<span><span className="font-semibold text-gray-800">Days Run:</span> {market.status.totalDaysRun}</span>
|
| 314 |
-
<span><span className="font-semibold text-gray-800">Total Trades:</span> {market.status.totalTrades}</span>
|
| 315 |
-
{market.status.lastWorkerStatus && (
|
| 316 |
-
<span><span className="font-semibold text-gray-800">Status:</span> {market.status.lastWorkerStatus}</span>
|
| 317 |
-
)}
|
| 318 |
-
{market.scan.universe && (
|
| 319 |
-
<span><span className="font-semibold text-gray-800">Universe:</span> {market.scan.universe}</span>
|
| 320 |
-
)}
|
| 321 |
-
{market.scan.stage1Count > 0 && (
|
| 322 |
-
<span><span className="font-semibold text-gray-800">Scan:</span> {market.scan.stage1Count} → {market.scan.stage2Count} stocks</span>
|
| 323 |
-
)}
|
| 324 |
-
</div>
|
| 325 |
-
|
| 326 |
-
{/* Eligible Stocks (Trade Pool) */}
|
| 327 |
-
{market.eligibleStocks.length > 0 && (
|
| 328 |
-
<div className="bg-white border border-gray-200 rounded-lg p-4 mb-6">
|
| 329 |
-
<h2 className="font-semibold text-gray-700 mb-2 flex items-center gap-2">
|
| 330 |
-
<Target className="w-5 h-5 text-green-500" />
|
| 331 |
-
Trade Pool ({market.eligibleStocks.length} {market.eligibleStocks.length === 1 ? 'stock' : 'stocks'})
|
| 332 |
-
</h2>
|
| 333 |
-
<p className="text-sm text-gray-500 mb-3">
|
| 334 |
-
Stocks eligible for trading based on ML scan results.
|
| 335 |
-
</p>
|
| 336 |
-
<div className="flex flex-wrap gap-2">
|
| 337 |
-
{market.eligibleStocks.map((sym) => (
|
| 338 |
-
<span key={sym} className="px-3 py-1.5 bg-green-50 text-green-700 border border-green-200 rounded-full text-sm font-semibold">
|
| 339 |
-
{sym}
|
| 340 |
-
</span>
|
| 341 |
-
))}
|
| 342 |
-
</div>
|
| 343 |
-
</div>
|
| 344 |
-
)}
|
| 345 |
-
|
| 346 |
-
{/* Open Positions */}
|
| 347 |
-
{openTrades.length > 0 && (
|
| 348 |
-
<div className="bg-white border border-gray-200 rounded-lg mb-6 overflow-hidden">
|
| 349 |
-
<div className="px-4 py-3 border-b border-gray-100 flex items-center gap-2">
|
| 350 |
-
<Activity className="w-5 h-5 text-blue-500" />
|
| 351 |
-
<h2 className="font-semibold text-gray-700">Open Positions ({openTrades.length})</h2>
|
| 352 |
-
</div>
|
| 353 |
-
<div className="overflow-x-auto">
|
| 354 |
-
<table className="w-full text-sm">
|
| 355 |
-
<thead>
|
| 356 |
-
<tr className="bg-gray-50 text-gray-500 text-xs uppercase">
|
| 357 |
-
<th className="px-4 py-2 text-left">Symbol</th>
|
| 358 |
-
<th className="px-4 py-2 text-right">Qty</th>
|
| 359 |
-
<th className="px-4 py-2 text-right">Entry Price</th>
|
| 360 |
-
<th className="px-4 py-2 text-left">Entry Date</th>
|
| 361 |
-
<th className="px-4 py-2 text-right">Confidence</th>
|
| 362 |
-
<th className="px-4 py-2 text-right">Predicted Ret.</th>
|
| 363 |
-
</tr>
|
| 364 |
-
</thead>
|
| 365 |
-
<tbody>
|
| 366 |
-
{openTrades.map((t, i) => (
|
| 367 |
-
<tr key={i} className="border-t border-gray-100 hover:bg-gray-50">
|
| 368 |
-
<td className="px-4 py-2.5 font-semibold text-gray-900">{t.symbol}</td>
|
| 369 |
-
<td className="px-4 py-2.5 text-right text-gray-700">{t.quantity}</td>
|
| 370 |
-
<td className="px-4 py-2.5 text-right text-gray-700">{fmtUSD(t.entryPrice)}</td>
|
| 371 |
-
<td className="px-4 py-2.5 text-gray-600">{fmtDate(t.entryDate)}</td>
|
| 372 |
-
<td className="px-4 py-2.5 text-right text-gray-700">{t.confidence != null ? `${(t.confidence * 100).toFixed(0)}%` : '—'}</td>
|
| 373 |
-
<td className="px-4 py-2.5 text-right text-gray-700">{t.predictedReturn != null ? fmtPct(t.predictedReturn * 100) : '—'}</td>
|
| 374 |
-
</tr>
|
| 375 |
-
))}
|
| 376 |
-
</tbody>
|
| 377 |
-
</table>
|
| 378 |
-
</div>
|
| 379 |
-
</div>
|
| 380 |
-
)}
|
| 381 |
-
|
| 382 |
-
{/* Recent Signals */}
|
| 383 |
-
{signals.length > 0 && (
|
| 384 |
-
<div className="bg-white border border-gray-200 rounded-lg mb-6 overflow-hidden">
|
| 385 |
-
<div className="px-4 py-3 border-b border-gray-100 flex items-center gap-2">
|
| 386 |
-
<Zap className="w-5 h-5 text-amber-500" />
|
| 387 |
-
<h2 className="font-semibold text-gray-700">Recent Signals ({signals.length})</h2>
|
| 388 |
-
</div>
|
| 389 |
-
<div className="overflow-x-auto">
|
| 390 |
-
<table className="w-full text-sm">
|
| 391 |
-
<thead>
|
| 392 |
-
<tr className="bg-gray-50 text-gray-500 text-xs uppercase">
|
| 393 |
-
<th className="px-4 py-2 text-left">Date</th>
|
| 394 |
-
<th className="px-4 py-2 text-left">Symbol</th>
|
| 395 |
-
<th className="px-4 py-2 text-center">Signal</th>
|
| 396 |
-
<th className="px-4 py-2 text-center">ML</th>
|
| 397 |
-
<th className="px-4 py-2 text-center">Tech</th>
|
| 398 |
-
<th className="px-4 py-2 text-right">Confidence</th>
|
| 399 |
-
<th className="px-4 py-2 text-center">Action Taken</th>
|
| 400 |
-
</tr>
|
| 401 |
-
</thead>
|
| 402 |
-
<tbody>
|
| 403 |
-
{signals.slice(0, 20).map((s, i) => (
|
| 404 |
-
<tr key={i} className="border-t border-gray-100 hover:bg-gray-50">
|
| 405 |
-
<td className="px-4 py-2.5 text-gray-500 text-xs">{fmtDate(s.date)}</td>
|
| 406 |
-
<td className="px-4 py-2.5 font-semibold text-gray-900">{s.symbol}</td>
|
| 407 |
-
<td className="px-4 py-2.5 text-center">
|
| 408 |
-
<span className={`px-2 py-0.5 rounded-full text-xs font-medium border ${signalBadgeClass(s.signal)}`}>
|
| 409 |
-
{s.signal}
|
| 410 |
-
</span>
|
| 411 |
-
</td>
|
| 412 |
-
<td className="px-4 py-2.5 text-center text-xs text-gray-600">{s.mlSignal ?? '—'}</td>
|
| 413 |
-
<td className="px-4 py-2.5 text-center text-xs text-gray-600">{s.techSignal ?? '—'}</td>
|
| 414 |
-
<td className="px-4 py-2.5 text-right text-gray-700">
|
| 415 |
-
{s.confidence != null ? `${(s.confidence * 100).toFixed(0)}%` : '—'}
|
| 416 |
-
</td>
|
| 417 |
-
<td className="px-4 py-2.5 text-center">
|
| 418 |
-
<span className={`px-2 py-0.5 rounded-full text-xs font-medium border ${actionBadgeClass(s.actionTaken)}`}>
|
| 419 |
-
{s.actionTaken ?? '—'}
|
| 420 |
-
</span>
|
| 421 |
-
</td>
|
| 422 |
-
</tr>
|
| 423 |
-
))}
|
| 424 |
-
</tbody>
|
| 425 |
-
</table>
|
| 426 |
-
</div>
|
| 427 |
-
</div>
|
| 428 |
-
)}
|
| 429 |
-
|
| 430 |
-
{/* Closed Trades */}
|
| 431 |
-
{closedTrades.length > 0 && (
|
| 432 |
-
<div className="bg-white border border-gray-200 rounded-lg mb-6 overflow-hidden">
|
| 433 |
-
<div className="px-4 py-3 border-b border-gray-100 flex items-center gap-2">
|
| 434 |
-
<Clock className="w-5 h-5 text-purple-500" />
|
| 435 |
-
<h2 className="font-semibold text-gray-700">Closed Trades ({closedTrades.length})</h2>
|
| 436 |
-
</div>
|
| 437 |
-
<div className="overflow-x-auto">
|
| 438 |
-
<table className="w-full text-sm">
|
| 439 |
-
<thead>
|
| 440 |
-
<tr className="bg-gray-50 text-gray-500 text-xs uppercase">
|
| 441 |
-
<th className="px-4 py-2 text-left">Symbol</th>
|
| 442 |
-
<th className="px-4 py-2 text-right">Qty</th>
|
| 443 |
-
<th className="px-4 py-2 text-right">Entry</th>
|
| 444 |
-
<th className="px-4 py-2 text-right">Exit</th>
|
| 445 |
-
<th className="px-4 py-2 text-right">Net P&L</th>
|
| 446 |
-
<th className="px-4 py-2 text-right">Return</th>
|
| 447 |
-
<th className="px-4 py-2 text-right">Days</th>
|
| 448 |
-
<th className="px-4 py-2 text-left">Exit Reason</th>
|
| 449 |
-
</tr>
|
| 450 |
-
</thead>
|
| 451 |
-
<tbody>
|
| 452 |
-
{closedTrades.map((t, i) => (
|
| 453 |
-
<tr key={i} className="border-t border-gray-100 hover:bg-gray-50">
|
| 454 |
-
<td className="px-4 py-2.5 font-semibold text-gray-900">{t.symbol}</td>
|
| 455 |
-
<td className="px-4 py-2.5 text-right text-gray-700">{t.quantity}</td>
|
| 456 |
-
<td className="px-4 py-2.5 text-right text-gray-700">{fmtUSD(t.entryPrice)}</td>
|
| 457 |
-
<td className="px-4 py-2.5 text-right text-gray-700">{fmtUSD(t.exitPrice)}</td>
|
| 458 |
-
<td className={`px-4 py-2.5 text-right font-semibold ${t.netPnl >= 0 ? 'text-green-600' : 'text-red-600'}`}>
|
| 459 |
-
{fmtUSD(t.netPnl)}
|
| 460 |
-
</td>
|
| 461 |
-
<td className={`px-4 py-2.5 text-right font-medium ${t.returnPct >= 0 ? 'text-green-600' : 'text-red-600'}`}>
|
| 462 |
-
{fmtPct(t.returnPct)}
|
| 463 |
-
</td>
|
| 464 |
-
<td className="px-4 py-2.5 text-right text-gray-600">{t.holdingDays ?? '—'}</td>
|
| 465 |
-
<td className="px-4 py-2.5 text-gray-500 text-xs capitalize">{t.exitReason?.replace(/_/g, ' ') ?? '—'}</td>
|
| 466 |
-
</tr>
|
| 467 |
-
))}
|
| 468 |
-
</tbody>
|
| 469 |
-
</table>
|
| 470 |
-
</div>
|
| 471 |
-
{perf && (
|
| 472 |
-
<div className="px-4 py-3 border-t border-gray-100 bg-gray-50 flex flex-wrap gap-4 text-sm">
|
| 473 |
-
<span className={`font-semibold ${perf.totalPnl >= 0 ? 'text-green-700' : 'text-red-700'}`}>
|
| 474 |
-
Net P&L: {fmtUSD(perf.totalPnl)}
|
| 475 |
-
</span>
|
| 476 |
-
{perf.closedTradesCount > 0 && (
|
| 477 |
-
<>
|
| 478 |
-
<span className="text-gray-600">Avg Win: <span className="text-green-600 font-medium">{fmtUSD(perf.avgWin)}</span></span>
|
| 479 |
-
<span className="text-gray-600">Avg Loss: <span className="text-red-600 font-medium">-{fmtUSD(perf.avgLoss)}</span></span>
|
| 480 |
-
</>
|
| 481 |
-
)}
|
| 482 |
-
</div>
|
| 483 |
-
)}
|
| 484 |
-
</div>
|
| 485 |
-
)}
|
| 486 |
-
|
| 487 |
-
{/* Equity Curve */}
|
| 488 |
-
{equityCurve.length > 0 && (
|
| 489 |
-
<div className="bg-white border border-gray-200 rounded-lg mb-6 overflow-hidden">
|
| 490 |
-
<button
|
| 491 |
-
onClick={() => setShowEquity(!showEquity)}
|
| 492 |
-
className="w-full px-4 py-3 flex items-center justify-between hover:bg-gray-50"
|
| 493 |
-
>
|
| 494 |
-
<span className="flex items-center gap-2 font-semibold text-gray-700">
|
| 495 |
-
<BarChart3 className="w-5 h-5 text-blue-500" />
|
| 496 |
-
Equity Curve ({equityCurve.length} days)
|
| 497 |
-
</span>
|
| 498 |
-
{showEquity ? <ChevronUp className="w-5 h-5 text-gray-400" /> : <ChevronDown className="w-5 h-5 text-gray-400" />}
|
| 499 |
-
</button>
|
| 500 |
-
{showEquity && (
|
| 501 |
-
<div className="px-4 pb-4">
|
| 502 |
-
<div className="flex items-end gap-[2px] h-32">
|
| 503 |
-
{equityCurve.slice(-60).map((pt, i) => {
|
| 504 |
-
const slice = equityCurve.slice(-60)
|
| 505 |
-
const min = Math.min(...slice.map((e) => e.equity))
|
| 506 |
-
const max = Math.max(...slice.map((e) => e.equity))
|
| 507 |
-
const range = max - min || 1
|
| 508 |
-
const height = ((pt.equity - min) / range) * 100
|
| 509 |
-
const isLast = i === slice.length - 1
|
| 510 |
-
const isUp = pt.equity >= initialCapital
|
| 511 |
-
return (
|
| 512 |
-
<div
|
| 513 |
-
key={i}
|
| 514 |
-
title={`${pt.date}: ${fmtUSD(pt.equity)}`}
|
| 515 |
-
style={{ height: `${Math.max(4, height)}%` }}
|
| 516 |
-
className={`flex-1 rounded-t transition-all ${isLast ? 'opacity-100' : 'opacity-70'} ${isUp ? 'bg-green-400' : 'bg-red-400'}`}
|
| 517 |
-
/>
|
| 518 |
-
)
|
| 519 |
-
})}
|
| 520 |
-
</div>
|
| 521 |
-
<div className="flex justify-between text-xs text-gray-400 mt-1">
|
| 522 |
-
<span>{equityCurve.slice(-60)[0]?.date}</span>
|
| 523 |
-
<span>{equityCurve.slice(-1)[0]?.date}</span>
|
| 524 |
-
</div>
|
| 525 |
-
</div>
|
| 526 |
-
)}
|
| 527 |
-
</div>
|
| 528 |
-
)}
|
| 529 |
-
|
| 530 |
-
{/* Empty state */}
|
| 531 |
-
{openTrades.length === 0 && closedTrades.length === 0 && signals.length === 0 && (
|
| 532 |
-
<div className="bg-gray-50 border border-gray-200 rounded-lg p-8 text-center text-gray-500 mb-6">
|
| 533 |
-
<DollarSign className="w-10 h-10 mx-auto mb-3 text-gray-300" />
|
| 534 |
-
<p className="font-medium">No trading activity yet</p>
|
| 535 |
-
<p className="text-sm mt-1">Run the US market worker to generate signals and trades.</p>
|
| 536 |
-
<Link href="/auto-trading" className="inline-flex items-center gap-2 mt-4 px-4 py-2 bg-blue-600 text-white rounded-lg text-sm hover:bg-blue-700">
|
| 537 |
-
<Bot className="w-4 h-4" />
|
| 538 |
-
Go to Trading Panel
|
| 539 |
-
</Link>
|
| 540 |
-
</div>
|
| 541 |
-
)}
|
| 542 |
-
|
| 543 |
-
{/* Footer link */}
|
| 544 |
-
<div className="flex justify-center">
|
| 545 |
-
<Link
|
| 546 |
-
href="/auto-trading"
|
| 547 |
-
className="flex items-center gap-2 px-5 py-2.5 rounded-lg border border-blue-200 bg-blue-50 text-blue-700 text-sm font-medium hover:bg-blue-100"
|
| 548 |
-
>
|
| 549 |
-
<Bot className="w-4 h-4" />
|
| 550 |
-
Manage US Worker in Trading Panel
|
| 551 |
-
<ArrowRight className="w-4 h-4" />
|
| 552 |
-
</Link>
|
| 553 |
-
</div>
|
| 554 |
-
</div>
|
| 555 |
-
)
|
| 556 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
huggingface-space/nextjs-app/src/components/MLPredictionCard.tsx
CHANGED
|
@@ -4,7 +4,6 @@ import { useCallback, useEffect, useState } from 'react';
|
|
| 4 |
import { TrendingUp, TrendingDown, Minus, Brain, Target, Activity } from 'lucide-react';
|
| 5 |
|
| 6 |
import { fetchJson } from '@/lib/http';
|
| 7 |
-
import { useMarket } from '@/contexts/MarketContext';
|
| 8 |
|
| 9 |
interface PredictionData {
|
| 10 |
symbol: string;
|
|
@@ -21,8 +20,6 @@ interface MLPredictionCardProps {
|
|
| 21 |
}
|
| 22 |
|
| 23 |
export default function MLPredictionCard({ symbol }: MLPredictionCardProps) {
|
| 24 |
-
const { market } = useMarket();
|
| 25 |
-
const isUS = market === 'us';
|
| 26 |
const [prediction, setPrediction] = useState<PredictionData | null>(null);
|
| 27 |
const [loading, setLoading] = useState(true);
|
| 28 |
const [error, setError] = useState<string | null>(null);
|
|
@@ -43,7 +40,7 @@ export default function MLPredictionCard({ symbol }: MLPredictionCardProps) {
|
|
| 43 |
{
|
| 44 |
timeoutMs: 30000,
|
| 45 |
retries: 0,
|
| 46 |
-
jsonBody: { symbols: [String(symbol || '').toUpperCase()], days_ahead: 7, model: 'ensemble'
|
| 47 |
}
|
| 48 |
);
|
| 49 |
|
|
@@ -81,7 +78,7 @@ export default function MLPredictionCard({ symbol }: MLPredictionCardProps) {
|
|
| 81 |
} finally {
|
| 82 |
setLoading(false);
|
| 83 |
}
|
| 84 |
-
}, [
|
| 85 |
|
| 86 |
useEffect(() => {
|
| 87 |
fetchPrediction();
|
|
@@ -180,7 +177,7 @@ export default function MLPredictionCard({ symbol }: MLPredictionCardProps) {
|
|
| 180 |
<div>
|
| 181 |
<div className="text-sm opacity-75">Mevcut Fiyat</div>
|
| 182 |
<div className="text-xl font-semibold">
|
| 183 |
-
{typeof prediction.current_price === 'number' ? `
|
| 184 |
</div>
|
| 185 |
</div>
|
| 186 |
<div>
|
|
|
|
| 4 |
import { TrendingUp, TrendingDown, Minus, Brain, Target, Activity } from 'lucide-react';
|
| 5 |
|
| 6 |
import { fetchJson } from '@/lib/http';
|
|
|
|
| 7 |
|
| 8 |
interface PredictionData {
|
| 9 |
symbol: string;
|
|
|
|
| 20 |
}
|
| 21 |
|
| 22 |
export default function MLPredictionCard({ symbol }: MLPredictionCardProps) {
|
|
|
|
|
|
|
| 23 |
const [prediction, setPrediction] = useState<PredictionData | null>(null);
|
| 24 |
const [loading, setLoading] = useState(true);
|
| 25 |
const [error, setError] = useState<string | null>(null);
|
|
|
|
| 40 |
{
|
| 41 |
timeoutMs: 30000,
|
| 42 |
retries: 0,
|
| 43 |
+
jsonBody: { symbols: [String(symbol || '').toUpperCase()], days_ahead: 7, model: 'ensemble' },
|
| 44 |
}
|
| 45 |
);
|
| 46 |
|
|
|
|
| 78 |
} finally {
|
| 79 |
setLoading(false);
|
| 80 |
}
|
| 81 |
+
}, [symbol]);
|
| 82 |
|
| 83 |
useEffect(() => {
|
| 84 |
fetchPrediction();
|
|
|
|
| 177 |
<div>
|
| 178 |
<div className="text-sm opacity-75">Mevcut Fiyat</div>
|
| 179 |
<div className="text-xl font-semibold">
|
| 180 |
+
{typeof prediction.current_price === 'number' ? `₺${prediction.current_price.toFixed(2)}` : '-'}
|
| 181 |
</div>
|
| 182 |
</div>
|
| 183 |
<div>
|
huggingface-space/nextjs-app/src/components/Navigation.tsx
CHANGED
|
@@ -5,7 +5,7 @@ import { useState, useRef, useEffect } from 'react'
|
|
| 5 |
import {
|
| 6 |
TrendingUp, BarChart3, Brain, Sparkles, Briefcase, LogIn, LogOut,
|
| 7 |
Activity, Search, Zap, Building2, History, Newspaper, Target, Menu, X, Star, Megaphone,
|
| 8 |
-
ChevronDown, Bot, Crosshair, Wrench
|
| 9 |
} from 'lucide-react'
|
| 10 |
import { useAuth } from '@/contexts/AuthContext'
|
| 11 |
|
|
@@ -55,9 +55,6 @@ export function Navigation() {
|
|
| 55 |
<DropdownLink href="/auto-trading" icon={<Bot className="h-4 w-4" />}>
|
| 56 |
Otomatik Trading
|
| 57 |
</DropdownLink>
|
| 58 |
-
<DropdownLink href="/us-market" icon={<Globe className="h-4 w-4" />}>
|
| 59 |
-
US Equities
|
| 60 |
-
</DropdownLink>
|
| 61 |
<DropdownLink href="/backtest" icon={<Target className="h-4 w-4" />}>
|
| 62 |
Backtest
|
| 63 |
</DropdownLink>
|
|
@@ -151,9 +148,6 @@ export function Navigation() {
|
|
| 151 |
<MobileNavLink href="/auto-trading" icon={<Bot className="h-5 w-5" />} onClick={() => setMobileMenuOpen(false)}>
|
| 152 |
Otomatik Trading
|
| 153 |
</MobileNavLink>
|
| 154 |
-
<MobileNavLink href="/us-market" icon={<Globe className="h-5 w-5" />} onClick={() => setMobileMenuOpen(false)}>
|
| 155 |
-
US Equities
|
| 156 |
-
</MobileNavLink>
|
| 157 |
<MobileNavLink href="/backtest" icon={<Target className="h-5 w-5" />} onClick={() => setMobileMenuOpen(false)}>
|
| 158 |
Backtest
|
| 159 |
</MobileNavLink>
|
|
|
|
| 5 |
import {
|
| 6 |
TrendingUp, BarChart3, Brain, Sparkles, Briefcase, LogIn, LogOut,
|
| 7 |
Activity, Search, Zap, Building2, History, Newspaper, Target, Menu, X, Star, Megaphone,
|
| 8 |
+
ChevronDown, Bot, Crosshair, Wrench
|
| 9 |
} from 'lucide-react'
|
| 10 |
import { useAuth } from '@/contexts/AuthContext'
|
| 11 |
|
|
|
|
| 55 |
<DropdownLink href="/auto-trading" icon={<Bot className="h-4 w-4" />}>
|
| 56 |
Otomatik Trading
|
| 57 |
</DropdownLink>
|
|
|
|
|
|
|
|
|
|
| 58 |
<DropdownLink href="/backtest" icon={<Target className="h-4 w-4" />}>
|
| 59 |
Backtest
|
| 60 |
</DropdownLink>
|
|
|
|
| 148 |
<MobileNavLink href="/auto-trading" icon={<Bot className="h-5 w-5" />} onClick={() => setMobileMenuOpen(false)}>
|
| 149 |
Otomatik Trading
|
| 150 |
</MobileNavLink>
|
|
|
|
|
|
|
|
|
|
| 151 |
<MobileNavLink href="/backtest" icon={<Target className="h-5 w-5" />} onClick={() => setMobileMenuOpen(false)}>
|
| 152 |
Backtest
|
| 153 |
</MobileNavLink>
|
huggingface-space/nextjs-app/src/components/TopMLPredictions.tsx
CHANGED
|
@@ -5,7 +5,6 @@ import { Brain, TrendingUp, TrendingDown, Loader2 } from 'lucide-react';
|
|
| 5 |
import Link from 'next/link';
|
| 6 |
|
| 7 |
import { fetchJson } from '@/lib/http';
|
| 8 |
-
import { useMarket } from '@/contexts/MarketContext';
|
| 9 |
|
| 10 |
interface TopPrediction {
|
| 11 |
symbol: string;
|
|
@@ -27,8 +26,6 @@ function fmt2(value: unknown): string {
|
|
| 27 |
}
|
| 28 |
|
| 29 |
export default function TopMLPredictions() {
|
| 30 |
-
const { market } = useMarket();
|
| 31 |
-
const isUS = market === 'us';
|
| 32 |
const [predictions, setPredictions] = useState<TopPrediction[]>([]);
|
| 33 |
const [loading, setLoading] = useState(true);
|
| 34 |
|
|
@@ -40,29 +37,19 @@ export default function TopMLPredictions() {
|
|
| 40 |
try {
|
| 41 |
setLoading(true);
|
| 42 |
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
return universe;
|
| 51 |
-
})()
|
| 52 |
-
: fetchJson<Record<string, unknown>>(
|
| 53 |
-
`/api/popular-stocks`,
|
| 54 |
-
{ method: 'GET' },
|
| 55 |
-
{ timeoutMs: 12000, retries: 1 }
|
| 56 |
-
);
|
| 57 |
-
const stocksData = await symbols;
|
| 58 |
-
const symbolList = (Array.isArray(stocksData.symbols) ? stocksData.symbols : Array.isArray(stocksData.stocks) ? stocksData.stocks : []) as string[];
|
| 59 |
-
const selectedSymbols = symbolList.slice(0, 10);
|
| 60 |
|
| 61 |
// Get predictions from HF
|
| 62 |
const data = await fetchJson<Record<string, unknown>>(
|
| 63 |
`/api/ml-predictions`,
|
| 64 |
{ method: 'POST' },
|
| 65 |
-
{ timeoutMs: 30000, retries: 0, jsonBody: { symbols
|
| 66 |
);
|
| 67 |
|
| 68 |
const preds = Array.isArray(data.predictions) ? data.predictions as Record<string, unknown>[] : [];
|
|
@@ -190,7 +177,7 @@ export default function TopMLPredictions() {
|
|
| 190 |
</div>
|
| 191 |
|
| 192 |
<div className="mt-3 pt-3 border-t border-gray-100 flex justify-between text-xs text-gray-500">
|
| 193 |
-
<span>Mevcut:
|
| 194 |
<span>Tahmin: ₺{fmt2(pred.predicted_price)}</span>
|
| 195 |
</div>
|
| 196 |
</Link>
|
|
|
|
| 5 |
import Link from 'next/link';
|
| 6 |
|
| 7 |
import { fetchJson } from '@/lib/http';
|
|
|
|
| 8 |
|
| 9 |
interface TopPrediction {
|
| 10 |
symbol: string;
|
|
|
|
| 26 |
}
|
| 27 |
|
| 28 |
export default function TopMLPredictions() {
|
|
|
|
|
|
|
| 29 |
const [predictions, setPredictions] = useState<TopPrediction[]>([]);
|
| 30 |
const [loading, setLoading] = useState(true);
|
| 31 |
|
|
|
|
| 37 |
try {
|
| 38 |
setLoading(true);
|
| 39 |
|
| 40 |
+
// Get popular stocks from HF
|
| 41 |
+
const stocksData = await fetchJson<Record<string, unknown>>(
|
| 42 |
+
`/api/popular-stocks`,
|
| 43 |
+
{ method: 'GET' },
|
| 44 |
+
{ timeoutMs: 12000, retries: 1 }
|
| 45 |
+
);
|
| 46 |
+
const symbols = (Array.isArray(stocksData.stocks) ? stocksData.stocks as string[] : []).slice(0, 10);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 47 |
|
| 48 |
// Get predictions from HF
|
| 49 |
const data = await fetchJson<Record<string, unknown>>(
|
| 50 |
`/api/ml-predictions`,
|
| 51 |
{ method: 'POST' },
|
| 52 |
+
{ timeoutMs: 30000, retries: 0, jsonBody: { symbols, days_ahead: 7, model: 'ensemble' } }
|
| 53 |
);
|
| 54 |
|
| 55 |
const preds = Array.isArray(data.predictions) ? data.predictions as Record<string, unknown>[] : [];
|
|
|
|
| 177 |
</div>
|
| 178 |
|
| 179 |
<div className="mt-3 pt-3 border-t border-gray-100 flex justify-between text-xs text-gray-500">
|
| 180 |
+
<span>Mevcut: ₺{fmt2(pred.current_price)}</span>
|
| 181 |
<span>Tahmin: ₺{fmt2(pred.predicted_price)}</span>
|
| 182 |
</div>
|
| 183 |
</Link>
|
huggingface-space/nextjs-app/src/lib/api-auth.ts
CHANGED
|
@@ -13,27 +13,6 @@ interface AuthError {
|
|
| 13 |
response: NextResponse
|
| 14 |
}
|
| 15 |
|
| 16 |
-
async function getUserFromBearerToken(
|
| 17 |
-
supabaseUrl: string,
|
| 18 |
-
supabaseAnonKey: string,
|
| 19 |
-
token: string,
|
| 20 |
-
) {
|
| 21 |
-
const response = await fetch(`${supabaseUrl}/auth/v1/user`, {
|
| 22 |
-
headers: {
|
| 23 |
-
apikey: supabaseAnonKey,
|
| 24 |
-
Authorization: `Bearer ${token}`,
|
| 25 |
-
},
|
| 26 |
-
cache: 'no-store',
|
| 27 |
-
})
|
| 28 |
-
|
| 29 |
-
if (!response.ok) {
|
| 30 |
-
return { data: { user: null }, error: new Error('Invalid bearer token') }
|
| 31 |
-
}
|
| 32 |
-
|
| 33 |
-
const user = await response.json()
|
| 34 |
-
return { data: { user }, error: null }
|
| 35 |
-
}
|
| 36 |
-
|
| 37 |
/**
|
| 38 |
* Verify that the incoming request belongs to an authenticated Supabase user.
|
| 39 |
*
|
|
@@ -50,7 +29,6 @@ async function getUserFromBearerToken(
|
|
| 50 |
export async function requireAuth(request?: Request): Promise<AuthResult | AuthError> {
|
| 51 |
try {
|
| 52 |
const cookieStore = await cookies()
|
| 53 |
-
const authHeader = request?.headers.get('authorization') ?? null
|
| 54 |
|
| 55 |
const supabaseUrl = process.env.NEXT_PUBLIC_SUPABASE_URL
|
| 56 |
const supabaseAnonKey = process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY
|
|
@@ -94,11 +72,11 @@ export async function requireAuth(request?: Request): Promise<AuthResult | AuthE
|
|
| 94 |
|
| 95 |
// Fallback: try Authorization header (for API clients)
|
| 96 |
if (request) {
|
|
|
|
| 97 |
if (authHeader?.startsWith('Bearer ')) {
|
| 98 |
const token = authHeader.slice(7)
|
| 99 |
const { data: { user: tokenUser }, error: tokenError } =
|
| 100 |
-
await
|
| 101 |
-
|
| 102 |
if (tokenUser && !tokenError) {
|
| 103 |
return { authenticated: true, userId: tokenUser.id, email: tokenUser.email }
|
| 104 |
}
|
|
|
|
| 13 |
response: NextResponse
|
| 14 |
}
|
| 15 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 16 |
/**
|
| 17 |
* Verify that the incoming request belongs to an authenticated Supabase user.
|
| 18 |
*
|
|
|
|
| 29 |
export async function requireAuth(request?: Request): Promise<AuthResult | AuthError> {
|
| 30 |
try {
|
| 31 |
const cookieStore = await cookies()
|
|
|
|
| 32 |
|
| 33 |
const supabaseUrl = process.env.NEXT_PUBLIC_SUPABASE_URL
|
| 34 |
const supabaseAnonKey = process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY
|
|
|
|
| 72 |
|
| 73 |
// Fallback: try Authorization header (for API clients)
|
| 74 |
if (request) {
|
| 75 |
+
const authHeader = request.headers.get('authorization')
|
| 76 |
if (authHeader?.startsWith('Bearer ')) {
|
| 77 |
const token = authHeader.slice(7)
|
| 78 |
const { data: { user: tokenUser }, error: tokenError } =
|
| 79 |
+
await supabase.auth.getUser(token)
|
|
|
|
| 80 |
if (tokenUser && !tokenError) {
|
| 81 |
return { authenticated: true, userId: tokenUser.id, email: tokenUser.email }
|
| 82 |
}
|
huggingface-space/requirements.txt
CHANGED
|
@@ -3,23 +3,23 @@ fastapi==0.109.0
|
|
| 3 |
uvicorn[standard]==0.27.0
|
| 4 |
pydantic==2.5.3
|
| 5 |
|
| 6 |
-
# Data & ML
|
| 7 |
-
yfinance>=0.2.28
|
| 8 |
-
pandas>=2.0.0
|
| 9 |
-
numpy>=1.24.0
|
| 10 |
-
scikit-learn>=1.3.0
|
| 11 |
-
xgboost>=2.0.0
|
| 12 |
-
lightgbm>=4.0.0
|
| 13 |
|
| 14 |
# Utilities
|
| 15 |
requests>=2.31.0
|
| 16 |
python-dateutil>=2.8.2
|
| 17 |
pytz>=2023.3
|
|
|
|
| 18 |
|
| 19 |
-
#
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
sentencepiece>=0.2.0
|
| 23 |
|
| 24 |
# For local smoke tests via FastAPI/Starlette TestClient.
|
| 25 |
# Starlette 0.35.x is not compatible with httpx 0.28+.
|
|
|
|
| 3 |
uvicorn[standard]==0.27.0
|
| 4 |
pydantic==2.5.3
|
| 5 |
|
| 6 |
+
# Data & ML (pinned to prevent surprise breakage on rebuild)
|
| 7 |
+
yfinance>=0.2.28,<1.0
|
| 8 |
+
pandas>=2.0.0,<3.0
|
| 9 |
+
numpy>=1.24.0,<3.0
|
| 10 |
+
scikit-learn>=1.3.0,<2.0
|
| 11 |
+
xgboost>=2.0.0,<3.0
|
| 12 |
+
lightgbm>=4.0.0,<5.0
|
| 13 |
|
| 14 |
# Utilities
|
| 15 |
requests>=2.31.0
|
| 16 |
python-dateutil>=2.8.2
|
| 17 |
pytz>=2023.3
|
| 18 |
+
lxml>=5.0.0
|
| 19 |
|
| 20 |
+
# NOTE: torch, transformers, sentencepiece removed — not imported anywhere
|
| 21 |
+
# Sentiment analysis uses VADER + TextBlob, not HuggingFace models.
|
| 22 |
+
# Removing ~3 GB of unused dependencies fixes container OOM / disk issues.
|
|
|
|
| 23 |
|
| 24 |
# For local smoke tests via FastAPI/Starlette TestClient.
|
| 25 |
# Starlette 0.35.x is not compatible with httpx 0.28+.
|
huggingface-space/run_bist100_scan.py
CHANGED
|
@@ -1,45 +1,17 @@
|
|
| 1 |
#!/usr/bin/env python3
|
| 2 |
-
"""
|
| 3 |
|
| 4 |
-
|
| 5 |
-
- Minimum 1 year of daily data
|
| 6 |
-
- Minimum average daily volume (configurable)
|
| 7 |
-
- Eliminates stocks that would waste backtest time
|
| 8 |
-
|
| 9 |
-
Stage 2: Full ML walk-forward backtest (only for Stage 1 passers)
|
| 10 |
-
- Uses the v3 classification ensemble
|
| 11 |
-
- Evaluates direction accuracy, Sharpe, hit rate
|
| 12 |
-
- Determines ML eligibility
|
| 13 |
-
|
| 14 |
-
Features:
|
| 15 |
-
- Resume support: saves after each stock, can continue from where it left off
|
| 16 |
-
- Memory management: gc.collect() between stocks
|
| 17 |
-
- Progress tracking with ETA
|
| 18 |
-
- Results saved to paper_trading/bist100_scan_results.json
|
| 19 |
-
|
| 20 |
-
Usage:
|
| 21 |
-
python3 run_bist100_scan.py # Scan BIST100
|
| 22 |
-
python3 run_bist100_scan.py --universe bist50 # Scan BIST50 only
|
| 23 |
-
python3 run_bist100_scan.py --resume # Resume interrupted scan
|
| 24 |
-
python3 run_bist100_scan.py --report # Show results only
|
| 25 |
-
python3 run_bist100_scan.py --force # Force re-scan all
|
| 26 |
"""
|
| 27 |
|
| 28 |
from __future__ import annotations
|
| 29 |
|
| 30 |
import argparse
|
| 31 |
-
import gc
|
| 32 |
-
import json
|
| 33 |
import logging
|
| 34 |
-
import sys
|
| 35 |
-
import time
|
| 36 |
-
from datetime import datetime, timezone
|
| 37 |
-
from pathlib import Path
|
| 38 |
-
from typing import Any, Dict, List, Optional, Tuple
|
| 39 |
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
| 43 |
logging.basicConfig(
|
| 44 |
level=logging.INFO,
|
| 45 |
format="%(asctime)s [%(levelname)s] %(message)s",
|
|
@@ -47,488 +19,32 @@ logging.basicConfig(
|
|
| 47 |
)
|
| 48 |
logger = logging.getLogger("bist100_scan")
|
| 49 |
|
| 50 |
-
# ---------------------------------------------------------------------------
|
| 51 |
-
# Config
|
| 52 |
-
# ---------------------------------------------------------------------------
|
| 53 |
-
# Write progress to a work file and only publish the final file when the scan completes.
|
| 54 |
-
# This allows trading to keep using the last completed results while a new scan is running.
|
| 55 |
-
FINAL_RESULTS_FILE = Path("paper_trading/bist100_scan_results.json")
|
| 56 |
-
WORK_RESULTS_FILE = Path("paper_trading/bist100_scan_results_work.json")
|
| 57 |
-
MIN_DATA_DAYS = 252 # At least 1 year of data required
|
| 58 |
-
MIN_AVG_VOLUME = 500_000 # Min avg daily volume (shares, not TL)
|
| 59 |
-
BACKTEST_START = "2023-01-01"
|
| 60 |
-
BACKTEST_END = "2025-12-31"
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
def _as_float(value: Any, default: float = 0.0) -> float:
|
| 64 |
-
"""Convert pandas/yfinance scalar-like results into a plain float."""
|
| 65 |
-
try:
|
| 66 |
-
current = value
|
| 67 |
-
while hasattr(current, "iloc"):
|
| 68 |
-
if len(current) == 0:
|
| 69 |
-
return default
|
| 70 |
-
current = current.iloc[0]
|
| 71 |
-
return float(current)
|
| 72 |
-
except Exception:
|
| 73 |
-
return default
|
| 74 |
-
TRAIN_WINDOW = 252 # 1 year train for eligibility scan
|
| 75 |
-
DAYS_AHEAD = 7
|
| 76 |
-
|
| 77 |
-
# Eligibility thresholds
|
| 78 |
-
MIN_DIR_ACC = 0.53
|
| 79 |
-
MIN_SHARPE = 0.0
|
| 80 |
-
MIN_HIT_RATE = 40.0
|
| 81 |
-
MIN_TRADES = 3
|
| 82 |
-
|
| 83 |
-
|
| 84 |
-
# ---------------------------------------------------------------------------
|
| 85 |
-
# Result persistence
|
| 86 |
-
# ---------------------------------------------------------------------------
|
| 87 |
-
def _load_results() -> Dict[str, Any]:
|
| 88 |
-
src = None
|
| 89 |
-
if WORK_RESULTS_FILE.exists():
|
| 90 |
-
src = WORK_RESULTS_FILE
|
| 91 |
-
elif FINAL_RESULTS_FILE.exists():
|
| 92 |
-
src = FINAL_RESULTS_FILE
|
| 93 |
-
|
| 94 |
-
if src is not None and src.exists():
|
| 95 |
-
try:
|
| 96 |
-
return json.loads(src.read_text())
|
| 97 |
-
except Exception:
|
| 98 |
-
pass
|
| 99 |
-
return {
|
| 100 |
-
"scan_started": None,
|
| 101 |
-
"universe": None,
|
| 102 |
-
"stage1": {}, # {symbol: {passed, reason, avg_volume, data_days}}
|
| 103 |
-
"stage2": {}, # {symbol: {eligible, dir_acc, sharpe, ...}}
|
| 104 |
-
"completed": False,
|
| 105 |
-
}
|
| 106 |
-
|
| 107 |
-
|
| 108 |
-
def _save_results(data: Dict[str, Any]) -> None:
|
| 109 |
-
WORK_RESULTS_FILE.parent.mkdir(parents=True, exist_ok=True)
|
| 110 |
-
data["updated_at"] = datetime.now(timezone.utc).isoformat()
|
| 111 |
-
tmp = WORK_RESULTS_FILE.with_suffix(".tmp")
|
| 112 |
-
tmp.write_text(json.dumps(data, indent=2, default=str))
|
| 113 |
-
tmp.rename(WORK_RESULTS_FILE)
|
| 114 |
-
|
| 115 |
-
|
| 116 |
-
def _publish_final_if_complete(data: Dict[str, Any]) -> None:
|
| 117 |
-
"""Publish a completed scan to the final results file atomically."""
|
| 118 |
-
if not data.get("completed", False):
|
| 119 |
-
return
|
| 120 |
-
FINAL_RESULTS_FILE.parent.mkdir(parents=True, exist_ok=True)
|
| 121 |
-
tmp = FINAL_RESULTS_FILE.with_suffix(".tmp")
|
| 122 |
-
tmp.write_text(json.dumps(data, indent=2, default=str))
|
| 123 |
-
tmp.rename(FINAL_RESULTS_FILE)
|
| 124 |
-
|
| 125 |
-
|
| 126 |
-
# ---------------------------------------------------------------------------
|
| 127 |
-
# Stage 1: Liquidity & Data Quality Pre-Filter
|
| 128 |
-
# ---------------------------------------------------------------------------
|
| 129 |
-
def stage1_prefilter(symbol: str) -> Dict[str, Any]:
|
| 130 |
-
"""Quick check: does this stock have enough data and liquidity?
|
| 131 |
-
|
| 132 |
-
Returns dict with:
|
| 133 |
-
- passed: bool
|
| 134 |
-
- reason: str
|
| 135 |
-
- avg_volume: float
|
| 136 |
-
- data_days: int
|
| 137 |
-
- last_price: float
|
| 138 |
-
"""
|
| 139 |
-
import yfinance as yf
|
| 140 |
-
|
| 141 |
-
ticker = f"{symbol}.IS"
|
| 142 |
-
result: Dict[str, Any] = {
|
| 143 |
-
"symbol": symbol,
|
| 144 |
-
"passed": False,
|
| 145 |
-
"reason": "",
|
| 146 |
-
"avg_volume": 0.0,
|
| 147 |
-
"data_days": 0,
|
| 148 |
-
"last_price": 0.0,
|
| 149 |
-
"checked_at": datetime.now(timezone.utc).isoformat(),
|
| 150 |
-
}
|
| 151 |
-
|
| 152 |
-
try:
|
| 153 |
-
df = yf.download(ticker, period="2y", progress=False, auto_adjust=True)
|
| 154 |
-
if df is None or df.empty:
|
| 155 |
-
result["reason"] = "no_data"
|
| 156 |
-
return result
|
| 157 |
-
|
| 158 |
-
data_days = len(df)
|
| 159 |
-
result["data_days"] = data_days
|
| 160 |
-
|
| 161 |
-
if data_days < MIN_DATA_DAYS:
|
| 162 |
-
result["reason"] = f"insufficient_data ({data_days} < {MIN_DATA_DAYS} days)"
|
| 163 |
-
return result
|
| 164 |
-
|
| 165 |
-
# Average daily volume
|
| 166 |
-
avg_vol = _as_float(df["Volume"].mean()) if "Volume" in df.columns else 0.0
|
| 167 |
-
result["avg_volume"] = round(avg_vol, 0)
|
| 168 |
-
|
| 169 |
-
if avg_vol < MIN_AVG_VOLUME:
|
| 170 |
-
result["reason"] = f"low_volume ({avg_vol:,.0f} < {MIN_AVG_VOLUME:,.0f})"
|
| 171 |
-
return result
|
| 172 |
-
|
| 173 |
-
# Last price (sanity check)
|
| 174 |
-
last_price = _as_float(df["Close"].iloc[-1])
|
| 175 |
-
result["last_price"] = round(last_price, 2)
|
| 176 |
-
|
| 177 |
-
if last_price < 1.0:
|
| 178 |
-
result["reason"] = f"penny_stock (price={last_price:.2f})"
|
| 179 |
-
return result
|
| 180 |
-
|
| 181 |
-
result["passed"] = True
|
| 182 |
-
result["reason"] = "OK"
|
| 183 |
-
return result
|
| 184 |
-
|
| 185 |
-
except Exception as e:
|
| 186 |
-
result["reason"] = f"error: {e}"
|
| 187 |
-
return result
|
| 188 |
-
|
| 189 |
-
|
| 190 |
-
# ---------------------------------------------------------------------------
|
| 191 |
-
# Stage 2: ML Backtest Eligibility
|
| 192 |
-
# ---------------------------------------------------------------------------
|
| 193 |
-
def stage2_backtest(symbol: str) -> Dict[str, Any]:
|
| 194 |
-
"""Full walk-forward backtest to evaluate ML eligibility."""
|
| 195 |
-
from analysis.walk_forward_backtest import walk_forward_backtest
|
| 196 |
-
|
| 197 |
-
result: Dict[str, Any] = {
|
| 198 |
-
"symbol": symbol,
|
| 199 |
-
"eligible": False,
|
| 200 |
-
"reason": "",
|
| 201 |
-
"evaluated_at": datetime.now(timezone.utc).isoformat(),
|
| 202 |
-
}
|
| 203 |
-
|
| 204 |
-
try:
|
| 205 |
-
_, m = walk_forward_backtest(
|
| 206 |
-
symbol=symbol,
|
| 207 |
-
start_date=BACKTEST_START,
|
| 208 |
-
end_date=BACKTEST_END,
|
| 209 |
-
days_ahead=DAYS_AHEAD,
|
| 210 |
-
train_window=TRAIN_WINDOW,
|
| 211 |
-
model_type="rf",
|
| 212 |
-
use_technical_gate=True,
|
| 213 |
-
initial_capital=100_000.0,
|
| 214 |
-
commission_bps=10.0,
|
| 215 |
-
slippage_bps=10.0,
|
| 216 |
-
exit_rule="signal_or_fixed",
|
| 217 |
-
max_hold_days=DAYS_AHEAD,
|
| 218 |
-
stop_loss_pct=0.05,
|
| 219 |
-
take_profit_pct=0.10,
|
| 220 |
-
trailing_stop_pct=0.07,
|
| 221 |
-
max_position_pct=0.50,
|
| 222 |
-
max_risk_per_trade_pct=0.02,
|
| 223 |
-
)
|
| 224 |
-
|
| 225 |
-
dir_acc = m["direction_accuracy"]
|
| 226 |
-
sharpe = m["sharpe"]
|
| 227 |
-
hit_rate = m.get("hit_rate_pct", 0.0)
|
| 228 |
-
total_ret = m["total_return_pct"]
|
| 229 |
-
trades = m["trades_count"]
|
| 230 |
-
max_dd = m.get("max_drawdown_pct", 0.0)
|
| 231 |
-
|
| 232 |
-
result.update({
|
| 233 |
-
"dir_acc": round(dir_acc, 4),
|
| 234 |
-
"sharpe": round(sharpe, 3),
|
| 235 |
-
"hit_rate": round(hit_rate, 1),
|
| 236 |
-
"total_return_pct": round(total_ret, 1),
|
| 237 |
-
"trades": trades,
|
| 238 |
-
"max_drawdown_pct": round(max_dd, 1),
|
| 239 |
-
})
|
| 240 |
-
|
| 241 |
-
reasons: List[str] = []
|
| 242 |
-
if dir_acc < MIN_DIR_ACC:
|
| 243 |
-
reasons.append(f"dir_acc={dir_acc:.1%}<{MIN_DIR_ACC:.0%}")
|
| 244 |
-
if sharpe < MIN_SHARPE:
|
| 245 |
-
reasons.append(f"sharpe={sharpe:.3f}<{MIN_SHARPE}")
|
| 246 |
-
if hit_rate < MIN_HIT_RATE:
|
| 247 |
-
reasons.append(f"hit_rate={hit_rate:.1f}%<{MIN_HIT_RATE}%")
|
| 248 |
-
if trades < MIN_TRADES:
|
| 249 |
-
reasons.append(f"trades={trades}<{MIN_TRADES}")
|
| 250 |
-
|
| 251 |
-
result["eligible"] = len(reasons) == 0
|
| 252 |
-
result["reason"] = "; ".join(reasons) if reasons else "OK"
|
| 253 |
-
|
| 254 |
-
return result
|
| 255 |
-
|
| 256 |
-
except Exception as e:
|
| 257 |
-
result["reason"] = f"backtest_error: {e}"
|
| 258 |
-
logger.warning("Stage 2 failed for %s: %s", symbol, e)
|
| 259 |
-
return result
|
| 260 |
-
|
| 261 |
-
|
| 262 |
-
# ---------------------------------------------------------------------------
|
| 263 |
-
# Fetch BIST universe
|
| 264 |
-
# ---------------------------------------------------------------------------
|
| 265 |
-
def get_universe(name: str) -> List[str]:
|
| 266 |
-
"""Get stock list from official Borsa Istanbul CSV."""
|
| 267 |
-
from data.index_constituents import get_index_constituents
|
| 268 |
-
|
| 269 |
-
try:
|
| 270 |
-
result = get_index_constituents(name)
|
| 271 |
-
symbols = result.symbols
|
| 272 |
-
logger.info("Fetched %d stocks from %s", len(symbols), name)
|
| 273 |
-
return symbols
|
| 274 |
-
except Exception as e:
|
| 275 |
-
logger.error("Failed to fetch universe %s: %s", name, e)
|
| 276 |
-
# Fallback: hardcoded BIST30 for testing
|
| 277 |
-
return [
|
| 278 |
-
"THYAO", "AKBNK", "GARAN", "EREGL", "SISE",
|
| 279 |
-
"TUPRS", "KCHOL", "ASELS", "BIMAS", "SAHOL",
|
| 280 |
-
"YKBNK", "HALKB", "VAKBN", "TCELL", "ARCLK",
|
| 281 |
-
"PETKM", "TOASO", "KOZAA", "KOZAL", "SASA",
|
| 282 |
-
"TAVHL", "TTKOM", "ENKAI", "FROTO", "EKGYO",
|
| 283 |
-
"PGSUS", "SOKM", "DOHOL", "GUBRF", "ISCTR",
|
| 284 |
-
]
|
| 285 |
-
|
| 286 |
-
|
| 287 |
-
# ---------------------------------------------------------------------------
|
| 288 |
-
# Main scan orchestrator
|
| 289 |
-
# ---------------------------------------------------------------------------
|
| 290 |
-
def run_scan(
|
| 291 |
-
universe: str = "bist100",
|
| 292 |
-
force: bool = False,
|
| 293 |
-
stage1_only: bool = False,
|
| 294 |
-
) -> Dict[str, Any]:
|
| 295 |
-
"""Run the 2-stage scan on the given universe."""
|
| 296 |
-
|
| 297 |
-
symbols = get_universe(universe)
|
| 298 |
-
if not symbols:
|
| 299 |
-
logger.error("No symbols found for universe: %s", universe)
|
| 300 |
-
return {}
|
| 301 |
-
|
| 302 |
-
data = _load_results()
|
| 303 |
-
|
| 304 |
-
# Check if we should start fresh or resume
|
| 305 |
-
is_same_universe = data.get("universe") == universe
|
| 306 |
-
if not is_same_universe or force:
|
| 307 |
-
logger.info("Starting fresh scan for %s (%d stocks)", universe, len(symbols))
|
| 308 |
-
data = {
|
| 309 |
-
"scan_started": datetime.now(timezone.utc).isoformat(),
|
| 310 |
-
"universe": universe,
|
| 311 |
-
"total_stocks": len(symbols),
|
| 312 |
-
"stage1": {},
|
| 313 |
-
"stage2": {},
|
| 314 |
-
"completed": False,
|
| 315 |
-
}
|
| 316 |
-
_save_results(data)
|
| 317 |
-
else:
|
| 318 |
-
s1_done = len(data.get("stage1", {}))
|
| 319 |
-
s2_done = len(data.get("stage2", {}))
|
| 320 |
-
logger.info(
|
| 321 |
-
"Resuming scan: %d/%d Stage1, %d Stage2 done",
|
| 322 |
-
s1_done, len(symbols), s2_done,
|
| 323 |
-
)
|
| 324 |
-
|
| 325 |
-
# ===== STAGE 1: Liquidity Pre-Filter =====
|
| 326 |
-
logger.info("=" * 60)
|
| 327 |
-
logger.info("STAGE 1: Liquidity & Data Quality Pre-Filter")
|
| 328 |
-
logger.info("=" * 60)
|
| 329 |
-
|
| 330 |
-
s1_todo = [s for s in symbols if s not in data.get("stage1", {})]
|
| 331 |
-
total_s1 = len(symbols)
|
| 332 |
-
done_s1 = total_s1 - len(s1_todo)
|
| 333 |
-
|
| 334 |
-
for i, sym in enumerate(s1_todo, start=done_s1 + 1):
|
| 335 |
-
t0 = time.time()
|
| 336 |
-
result = stage1_prefilter(sym)
|
| 337 |
-
elapsed = time.time() - t0
|
| 338 |
-
|
| 339 |
-
status = "PASS" if result["passed"] else f"FAIL ({result['reason']})"
|
| 340 |
-
logger.info(
|
| 341 |
-
"[Stage1 %d/%d] %s: %s (%.1fs)",
|
| 342 |
-
i, total_s1, sym, status, elapsed,
|
| 343 |
-
)
|
| 344 |
-
|
| 345 |
-
data.setdefault("stage1", {})[sym] = result
|
| 346 |
-
_save_results(data)
|
| 347 |
-
gc.collect()
|
| 348 |
-
|
| 349 |
-
# Summary
|
| 350 |
-
s1_passed = [s for s, v in data["stage1"].items() if v.get("passed")]
|
| 351 |
-
s1_failed = [s for s, v in data["stage1"].items() if not v.get("passed")]
|
| 352 |
-
logger.info("")
|
| 353 |
-
logger.info("Stage 1 Results: %d PASS / %d FAIL out of %d",
|
| 354 |
-
len(s1_passed), len(s1_failed), total_s1)
|
| 355 |
-
logger.info("Passed: %s", ", ".join(sorted(s1_passed)))
|
| 356 |
-
logger.info("")
|
| 357 |
-
|
| 358 |
-
if stage1_only:
|
| 359 |
-
data["completed"] = True
|
| 360 |
-
_save_results(data)
|
| 361 |
-
_publish_final_if_complete(data)
|
| 362 |
-
return data
|
| 363 |
-
|
| 364 |
-
# ===== STAGE 2: ML Backtest =====
|
| 365 |
-
logger.info("=" * 60)
|
| 366 |
-
logger.info("STAGE 2: ML Walk-Forward Backtest")
|
| 367 |
-
logger.info("=" * 60)
|
| 368 |
-
|
| 369 |
-
s2_todo = [s for s in s1_passed if s not in data.get("stage2", {})]
|
| 370 |
-
total_s2 = len(s1_passed)
|
| 371 |
-
done_s2 = total_s2 - len(s2_todo)
|
| 372 |
-
|
| 373 |
-
times_s2: List[float] = []
|
| 374 |
-
|
| 375 |
-
for i, sym in enumerate(s2_todo, start=done_s2 + 1):
|
| 376 |
-
t0 = time.time()
|
| 377 |
-
result = stage2_backtest(sym)
|
| 378 |
-
elapsed = time.time() - t0
|
| 379 |
-
times_s2.append(elapsed)
|
| 380 |
-
|
| 381 |
-
if result["eligible"]:
|
| 382 |
-
status = (
|
| 383 |
-
f"ELIGIBLE (ret={result.get('total_return_pct', 0):.1f}%, "
|
| 384 |
-
f"sharpe={result.get('sharpe', 0):.3f}, "
|
| 385 |
-
f"hit={result.get('hit_rate', 0):.0f}%)"
|
| 386 |
-
)
|
| 387 |
-
else:
|
| 388 |
-
status = f"EXCLUDED ({result['reason']})"
|
| 389 |
-
|
| 390 |
-
# ETA
|
| 391 |
-
avg_time = sum(times_s2) / len(times_s2)
|
| 392 |
-
remaining = total_s2 - i
|
| 393 |
-
eta_min = (remaining * avg_time) / 60
|
| 394 |
-
|
| 395 |
-
logger.info(
|
| 396 |
-
"[Stage2 %d/%d] %s: %s (%.1fs, ETA: %.0f min)",
|
| 397 |
-
i, total_s2, sym, status, elapsed, eta_min,
|
| 398 |
-
)
|
| 399 |
-
|
| 400 |
-
data.setdefault("stage2", {})[sym] = result
|
| 401 |
-
_save_results(data)
|
| 402 |
-
gc.collect()
|
| 403 |
-
|
| 404 |
-
# ===== Final Report =====
|
| 405 |
-
data["completed"] = True
|
| 406 |
-
data["scan_finished"] = datetime.now(timezone.utc).isoformat()
|
| 407 |
-
_save_results(data)
|
| 408 |
-
_publish_final_if_complete(data)
|
| 409 |
-
|
| 410 |
-
print_report(data)
|
| 411 |
-
return data
|
| 412 |
-
|
| 413 |
-
|
| 414 |
-
# ---------------------------------------------------------------------------
|
| 415 |
-
# Report
|
| 416 |
-
# ---------------------------------------------------------------------------
|
| 417 |
-
def print_report(data: Optional[Dict[str, Any]] = None) -> None:
|
| 418 |
-
"""Print a formatted scan report."""
|
| 419 |
-
if data is None:
|
| 420 |
-
data = _load_results()
|
| 421 |
-
|
| 422 |
-
s2 = data.get("stage2", {})
|
| 423 |
-
if not s2:
|
| 424 |
-
logger.info("No Stage 2 results found. Run scan first.")
|
| 425 |
-
return
|
| 426 |
-
|
| 427 |
-
eligible = {s: v for s, v in s2.items() if v.get("eligible")}
|
| 428 |
-
excluded = {s: v for s, v in s2.items() if not v.get("eligible")}
|
| 429 |
-
|
| 430 |
-
print("\n" + "=" * 70)
|
| 431 |
-
print(f" BIST STOCK SCAN RESULTS — {data.get('universe', '?').upper()}")
|
| 432 |
-
print("=" * 70)
|
| 433 |
-
|
| 434 |
-
# Stage 1 stats
|
| 435 |
-
s1 = data.get("stage1", {})
|
| 436 |
-
s1_pass = sum(1 for v in s1.values() if v.get("passed"))
|
| 437 |
-
s1_fail = sum(1 for v in s1.values() if not v.get("passed"))
|
| 438 |
-
print(f"\nStage 1 (Liquidity Filter): {s1_pass} pass / {s1_fail} fail / {len(s1)} total")
|
| 439 |
-
print(f"Stage 2 (ML Backtest): {len(eligible)} eligible / {len(excluded)} excluded / {len(s2)} tested")
|
| 440 |
-
|
| 441 |
-
# Eligible stocks sorted by Sharpe
|
| 442 |
-
if eligible:
|
| 443 |
-
print(f"\n{'─' * 70}")
|
| 444 |
-
print(f" ELIGIBLE STOCKS ({len(eligible)})")
|
| 445 |
-
print(f"{'─' * 70}")
|
| 446 |
-
print(f" {'Symbol':<10} {'Return%':>9} {'Sharpe':>8} {'HitRate%':>9} {'MaxDD%':>8} {'Trades':>7}")
|
| 447 |
-
print(f" {'─'*10} {'─'*9} {'─'*8} {'─'*9} {'─'*8} {'─'*7}")
|
| 448 |
-
|
| 449 |
-
sorted_eligible = sorted(
|
| 450 |
-
eligible.items(),
|
| 451 |
-
key=lambda kv: kv[1].get("sharpe", -999),
|
| 452 |
-
reverse=True,
|
| 453 |
-
)
|
| 454 |
-
for sym, m in sorted_eligible:
|
| 455 |
-
print(
|
| 456 |
-
f" {sym:<10} {m.get('total_return_pct', 0):>+8.1f}% "
|
| 457 |
-
f"{m.get('sharpe', 0):>8.3f} "
|
| 458 |
-
f"{m.get('hit_rate', 0):>8.1f}% "
|
| 459 |
-
f"{m.get('max_drawdown_pct', 0):>7.1f}% "
|
| 460 |
-
f"{m.get('trades', 0):>7d}"
|
| 461 |
-
)
|
| 462 |
-
|
| 463 |
-
# Portfolio summary
|
| 464 |
-
avg_ret = sum(m.get("total_return_pct", 0) for m in eligible.values()) / len(eligible)
|
| 465 |
-
avg_sharpe = sum(m.get("sharpe", 0) for m in eligible.values()) / len(eligible)
|
| 466 |
-
avg_hit = sum(m.get("hit_rate", 0) for m in eligible.values()) / len(eligible)
|
| 467 |
-
print(f"\n Average: {avg_ret:>+8.1f}% {avg_sharpe:>8.3f} {avg_hit:>8.1f}%")
|
| 468 |
-
|
| 469 |
-
# Excluded stocks summary
|
| 470 |
-
if excluded:
|
| 471 |
-
print(f"\n{'─' * 70}")
|
| 472 |
-
print(f" EXCLUDED STOCKS ({len(excluded)})")
|
| 473 |
-
print(f"{'─' * 70}")
|
| 474 |
-
for sym, m in sorted(excluded.items()):
|
| 475 |
-
ret = m.get("total_return_pct", "?")
|
| 476 |
-
reason = m.get("reason", "?")
|
| 477 |
-
ret_str = f"{ret:>+.1f}%" if isinstance(ret, (int, float)) else "N/A"
|
| 478 |
-
print(f" {sym:<10} {ret_str:>9} {reason}")
|
| 479 |
-
|
| 480 |
-
# Stage 1 failures
|
| 481 |
-
s1_failures = {s: v for s, v in s1.items() if not v.get("passed")}
|
| 482 |
-
if s1_failures:
|
| 483 |
-
print(f"\n{'─' * 70}")
|
| 484 |
-
print(f" STAGE 1 FAILURES — Skipped ({len(s1_failures)})")
|
| 485 |
-
print(f"{'─' * 70}")
|
| 486 |
-
for sym, v in sorted(s1_failures.items()):
|
| 487 |
-
print(f" {sym:<10} {v.get('reason', '?')}")
|
| 488 |
-
|
| 489 |
-
print(f"\n{'=' * 70}")
|
| 490 |
-
print(f" Scan: {data.get('scan_started', '?')} → {data.get('scan_finished', 'in progress')}")
|
| 491 |
-
print(f"{'=' * 70}\n")
|
| 492 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 493 |
|
| 494 |
-
# ---------------------------------------------------------------------------
|
| 495 |
-
# Entry point
|
| 496 |
-
# ---------------------------------------------------------------------------
|
| 497 |
-
def main():
|
| 498 |
-
parser = argparse.ArgumentParser(description="BIST Stock Scanner")
|
| 499 |
-
parser.add_argument(
|
| 500 |
-
"--universe", default="bist100",
|
| 501 |
-
help="Universe to scan: bist30, bist50, bist100, all (default: bist100)",
|
| 502 |
-
)
|
| 503 |
-
parser.add_argument(
|
| 504 |
-
"--resume", action="store_true",
|
| 505 |
-
help="Resume interrupted scan (default behavior if same universe)",
|
| 506 |
-
)
|
| 507 |
-
parser.add_argument(
|
| 508 |
-
"--force", action="store_true",
|
| 509 |
-
help="Force re-scan all stocks (ignore cache)",
|
| 510 |
-
)
|
| 511 |
-
parser.add_argument(
|
| 512 |
-
"--report", action="store_true",
|
| 513 |
-
help="Show results only, don't scan",
|
| 514 |
-
)
|
| 515 |
-
parser.add_argument(
|
| 516 |
-
"--stage1-only", action="store_true",
|
| 517 |
-
help="Run only Stage 1 (liquidity filter) — fast",
|
| 518 |
-
)
|
| 519 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 520 |
args = parser.parse_args()
|
| 521 |
|
|
|
|
| 522 |
if args.report:
|
| 523 |
-
print_report()
|
| 524 |
-
return
|
| 525 |
|
| 526 |
-
|
| 527 |
-
|
| 528 |
-
|
| 529 |
-
stage1_only=args.stage1_only,
|
| 530 |
-
)
|
| 531 |
|
| 532 |
|
| 533 |
if __name__ == "__main__":
|
| 534 |
-
main()
|
|
|
|
| 1 |
#!/usr/bin/env python3
|
| 2 |
+
"""BIST batch stock scanner.
|
| 3 |
|
| 4 |
+
Thin wrapper around the shared scanner engine.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 5 |
"""
|
| 6 |
|
| 7 |
from __future__ import annotations
|
| 8 |
|
| 9 |
import argparse
|
|
|
|
|
|
|
| 10 |
import logging
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 11 |
|
| 12 |
+
from trading.scanner_engine import ScanConfig, print_report, run_scan
|
| 13 |
+
|
| 14 |
+
|
| 15 |
logging.basicConfig(
|
| 16 |
level=logging.INFO,
|
| 17 |
format="%(asctime)s [%(levelname)s] %(message)s",
|
|
|
|
| 19 |
)
|
| 20 |
logger = logging.getLogger("bist100_scan")
|
| 21 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 22 |
|
| 23 |
+
CONFIGS = {
|
| 24 |
+
"bist30": ScanConfig(market_id="bist", universe_name="bist30", title="BIST"),
|
| 25 |
+
"bist50": ScanConfig(market_id="bist", universe_name="bist50", title="BIST"),
|
| 26 |
+
"bist100": ScanConfig(market_id="bist", universe_name="bist100", title="BIST"),
|
| 27 |
+
}
|
| 28 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 29 |
|
| 30 |
+
def main() -> int:
|
| 31 |
+
parser = argparse.ArgumentParser(description="BIST batch stock scanner")
|
| 32 |
+
parser.add_argument("--universe", default="bist100", choices=sorted(CONFIGS))
|
| 33 |
+
parser.add_argument("--resume", action="store_true", help="Resume interrupted scan")
|
| 34 |
+
parser.add_argument("--report", action="store_true", help="Show latest report only")
|
| 35 |
+
parser.add_argument("--force", action="store_true", help="Force a fresh scan")
|
| 36 |
+
parser.add_argument("--stage1-only", action="store_true", help="Run only liquidity pre-filter")
|
| 37 |
args = parser.parse_args()
|
| 38 |
|
| 39 |
+
config = CONFIGS[args.universe]
|
| 40 |
if args.report:
|
| 41 |
+
print_report(config, logger)
|
| 42 |
+
return 0
|
| 43 |
|
| 44 |
+
force = args.force or not args.resume
|
| 45 |
+
run_scan(config, logger, force=force, stage1_only=args.stage1_only)
|
| 46 |
+
return 0
|
|
|
|
|
|
|
| 47 |
|
| 48 |
|
| 49 |
if __name__ == "__main__":
|
| 50 |
+
raise SystemExit(main())
|
huggingface-space/run_us_scan.py
CHANGED
|
@@ -1,44 +1,15 @@
|
|
| 1 |
#!/usr/bin/env python3
|
| 2 |
-
"""US
|
| 3 |
-
|
| 4 |
-
Stage 1: Quick liquidity & data quality pre-filter via yfinance
|
| 5 |
-
- Minimum 1 year of daily data
|
| 6 |
-
- Minimum average daily volume (configurable)
|
| 7 |
-
- Minimum share price ($5+)
|
| 8 |
-
- Eliminates stocks that would waste backtest time
|
| 9 |
-
|
| 10 |
-
Stage 2: Full ML walk-forward backtest (only for Stage 1 passers)
|
| 11 |
-
- Uses the v3 classification ensemble
|
| 12 |
-
- Evaluates direction accuracy, Sharpe, hit rate
|
| 13 |
-
- Determines ML eligibility
|
| 14 |
-
|
| 15 |
-
Features:
|
| 16 |
-
- Resume support: saves after each stock, can continue from where it left off
|
| 17 |
-
- Memory management: gc.collect() between stocks
|
| 18 |
-
- Progress tracking with ETA
|
| 19 |
-
- Results saved to paper_trading/markets/us/scan_results.json
|
| 20 |
-
|
| 21 |
-
Usage:
|
| 22 |
-
python3 run_us_scan.py # Scan SP100
|
| 23 |
-
python3 run_us_scan.py --universe sp500 # Scan SP500
|
| 24 |
-
python3 run_us_scan.py --force # Force re-scan all
|
| 25 |
-
"""
|
| 26 |
|
| 27 |
from __future__ import annotations
|
| 28 |
|
| 29 |
-
import
|
| 30 |
-
import
|
| 31 |
import logging
|
| 32 |
-
import time
|
| 33 |
-
from datetime import datetime, timezone
|
| 34 |
-
from io import StringIO
|
| 35 |
-
from pathlib import Path
|
| 36 |
-
from typing import Any, Dict, List, Optional, Tuple
|
| 37 |
-
from urllib.request import Request, urlopen
|
| 38 |
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
| 42 |
logging.basicConfig(
|
| 43 |
level=logging.INFO,
|
| 44 |
format="%(asctime)s [%(levelname)s] %(message)s",
|
|
@@ -46,425 +17,38 @@ logging.basicConfig(
|
|
| 46 |
)
|
| 47 |
logger = logging.getLogger("us_scan")
|
| 48 |
|
| 49 |
-
# ---------------------------------------------------------------------------
|
| 50 |
-
# Config
|
| 51 |
-
# ---------------------------------------------------------------------------
|
| 52 |
-
FINAL_RESULTS_FILE = Path("paper_trading/markets/us/scan_results.json")
|
| 53 |
-
WORK_RESULTS_FILE = Path("paper_trading/markets/us/scan_results_work.json")
|
| 54 |
-
|
| 55 |
-
MIN_DATA_DAYS = 252 # At least 1 year of data
|
| 56 |
-
MIN_AVG_VOLUME = 1_000_000 # Min avg daily volume (shares)
|
| 57 |
-
MIN_PRICE = 5.0 # Min share price in USD
|
| 58 |
-
BACKTEST_START = "2023-01-01"
|
| 59 |
-
BACKTEST_END = "2025-12-31"
|
| 60 |
-
TRAIN_WINDOW = 252
|
| 61 |
-
DAYS_AHEAD = 7
|
| 62 |
|
| 63 |
-
|
| 64 |
-
|
| 65 |
-
|
| 66 |
-
|
| 67 |
-
MIN_TRADES = 3
|
| 68 |
-
|
| 69 |
-
_DEFAULT_HTML_HEADERS = {
|
| 70 |
-
"User-Agent": (
|
| 71 |
-
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
|
| 72 |
-
"AppleWebKit/537.36 (KHTML, like Gecko) "
|
| 73 |
-
"Chrome/123.0.0.0 Safari/537.36"
|
| 74 |
-
),
|
| 75 |
-
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
|
| 76 |
-
"Accept-Language": "en-US,en;q=0.9",
|
| 77 |
}
|
| 78 |
|
| 79 |
-
# Hardcoded SP100 fallback in case Wikipedia scraping fails
|
| 80 |
-
_SP100_FALLBACK = [
|
| 81 |
-
"AAPL", "MSFT", "AMZN", "NVDA", "GOOGL", "GOOG", "META", "TSLA", "BRK-B",
|
| 82 |
-
"UNH", "XOM", "JNJ", "JPM", "V", "PG", "MA", "HD", "CVX", "LLY", "ABBV",
|
| 83 |
-
"MRK", "PEP", "COST", "KO", "AVGO", "TMO", "WMT", "MCD", "BAC", "ABT",
|
| 84 |
-
"PM", "CRM", "ACN", "NEE", "NKE", "LIN", "DHR", "TXN", "ORCL", "VZ",
|
| 85 |
-
"ADBE", "BMY", "QCOM", "T", "UPS", "RTX", "AMGN", "LOW", "INTU", "HON",
|
| 86 |
-
"IBM", "SBUX", "ELV", "MDT", "GE", "CAT", "GS", "MS", "NFLX", "NOW",
|
| 87 |
-
"AXP", "BKNG", "ISRG", "DE", "CVS", "GILD", "AMD", "AMAT", "PYPL", "TGT",
|
| 88 |
-
"MDLZ", "CI", "BLK", "ADP", "VRTX", "SYK", "TJX", "CB", "LMT", "C",
|
| 89 |
-
"MMC", "REGN", "SO", "DUK", "BSX", "EOG", "NOC", "FDX", "MU", "PLD",
|
| 90 |
-
"WM", "SLB", "ADI", "ETN", "APD", "ZTS", "HCA", "EMR", "AON", "USB",
|
| 91 |
-
]
|
| 92 |
-
|
| 93 |
-
# ---------------------------------------------------------------------------
|
| 94 |
-
# Universe fetching
|
| 95 |
-
# ---------------------------------------------------------------------------
|
| 96 |
-
def _clean_us_symbol(raw: str) -> str:
|
| 97 |
-
token = str(raw or "").strip().upper()
|
| 98 |
-
return token.replace(".", "-")
|
| 99 |
-
|
| 100 |
-
|
| 101 |
-
def _fetch_universe_from_wikipedia(name: str) -> List[str]:
|
| 102 |
-
"""Fetch US stock universe from Wikipedia. Returns [] on failure."""
|
| 103 |
-
source_map = {
|
| 104 |
-
"sp100": ("https://en.wikipedia.org/wiki/S%26P_100", "Symbol"),
|
| 105 |
-
"sp500": ("https://en.wikipedia.org/wiki/List_of_S%26P_500_companies", "Symbol"),
|
| 106 |
-
"nasdaq100": ("https://en.wikipedia.org/wiki/Nasdaq-100", "Ticker"),
|
| 107 |
-
}
|
| 108 |
-
if name not in source_map:
|
| 109 |
-
return []
|
| 110 |
-
|
| 111 |
-
url, col_name = source_map[name]
|
| 112 |
-
try:
|
| 113 |
-
import pandas as pd
|
| 114 |
-
request = Request(url, headers=_DEFAULT_HTML_HEADERS)
|
| 115 |
-
with urlopen(request, timeout=20) as response:
|
| 116 |
-
charset = response.headers.get_content_charset() or "utf-8"
|
| 117 |
-
html = response.read().decode(charset, errors="replace")
|
| 118 |
-
tables = pd.read_html(StringIO(html))
|
| 119 |
-
for table in tables:
|
| 120 |
-
columns = {str(c).strip() for c in table.columns}
|
| 121 |
-
if col_name in columns:
|
| 122 |
-
symbols = []
|
| 123 |
-
seen: set = set()
|
| 124 |
-
for item in table[col_name].tolist():
|
| 125 |
-
sym = _clean_us_symbol(item)
|
| 126 |
-
if sym and sym not in seen:
|
| 127 |
-
seen.add(sym)
|
| 128 |
-
symbols.append(sym)
|
| 129 |
-
logger.info("Fetched %d symbols for %s from Wikipedia", len(symbols), name)
|
| 130 |
-
return symbols
|
| 131 |
-
except Exception as e:
|
| 132 |
-
logger.warning("Wikipedia fetch failed for %s: %s", name, e)
|
| 133 |
-
return []
|
| 134 |
-
|
| 135 |
-
|
| 136 |
-
def get_universe(name: str) -> List[str]:
|
| 137 |
-
"""Get US stock universe by name, with Wikipedia as primary and hardcoded fallback."""
|
| 138 |
-
name = (name or "sp100").strip().lower()
|
| 139 |
-
symbols = _fetch_universe_from_wikipedia(name)
|
| 140 |
-
if symbols:
|
| 141 |
-
return symbols
|
| 142 |
-
|
| 143 |
-
# Fallback
|
| 144 |
-
if name in ("sp100", "sp500", "nasdaq100", "all"):
|
| 145 |
-
logger.warning("Using hardcoded SP100 fallback for universe: %s", name)
|
| 146 |
-
return _SP100_FALLBACK[:]
|
| 147 |
-
|
| 148 |
-
logger.error("Unknown US universe: %s", name)
|
| 149 |
-
return _SP100_FALLBACK[:]
|
| 150 |
-
|
| 151 |
-
|
| 152 |
-
# ---------------------------------------------------------------------------
|
| 153 |
-
# Result persistence
|
| 154 |
-
# ---------------------------------------------------------------------------
|
| 155 |
-
def _load_results() -> Dict[str, Any]:
|
| 156 |
-
src = None
|
| 157 |
-
if WORK_RESULTS_FILE.exists():
|
| 158 |
-
src = WORK_RESULTS_FILE
|
| 159 |
-
elif FINAL_RESULTS_FILE.exists():
|
| 160 |
-
src = FINAL_RESULTS_FILE
|
| 161 |
-
|
| 162 |
-
if src is not None:
|
| 163 |
-
try:
|
| 164 |
-
return json.loads(src.read_text())
|
| 165 |
-
except Exception:
|
| 166 |
-
pass
|
| 167 |
-
return {
|
| 168 |
-
"scan_started": None,
|
| 169 |
-
"universe": None,
|
| 170 |
-
"market_id": "us",
|
| 171 |
-
"stage1": {},
|
| 172 |
-
"stage2": {},
|
| 173 |
-
"completed": False,
|
| 174 |
-
}
|
| 175 |
-
|
| 176 |
-
|
| 177 |
-
def _save_results(data: Dict[str, Any]) -> None:
|
| 178 |
-
WORK_RESULTS_FILE.parent.mkdir(parents=True, exist_ok=True)
|
| 179 |
-
data["updated_at"] = datetime.now(timezone.utc).isoformat()
|
| 180 |
-
tmp = WORK_RESULTS_FILE.with_suffix(".tmp")
|
| 181 |
-
tmp.write_text(json.dumps(data, indent=2, default=str))
|
| 182 |
-
tmp.rename(WORK_RESULTS_FILE)
|
| 183 |
-
|
| 184 |
-
|
| 185 |
-
def _publish_final_if_complete(data: Dict[str, Any]) -> None:
|
| 186 |
-
if not data.get("completed", False):
|
| 187 |
-
return
|
| 188 |
-
FINAL_RESULTS_FILE.parent.mkdir(parents=True, exist_ok=True)
|
| 189 |
-
tmp = FINAL_RESULTS_FILE.with_suffix(".tmp")
|
| 190 |
-
tmp.write_text(json.dumps(data, indent=2, default=str))
|
| 191 |
-
tmp.rename(FINAL_RESULTS_FILE)
|
| 192 |
-
|
| 193 |
-
|
| 194 |
-
# ---------------------------------------------------------------------------
|
| 195 |
-
# Stage 1: Liquidity & Data Quality Pre-Filter
|
| 196 |
-
# ---------------------------------------------------------------------------
|
| 197 |
-
def stage1_prefilter(symbol: str) -> Dict[str, Any]:
|
| 198 |
-
"""Quick check: does this US stock have enough data and liquidity?"""
|
| 199 |
-
import yfinance as yf
|
| 200 |
-
|
| 201 |
-
result: Dict[str, Any] = {
|
| 202 |
-
"symbol": symbol,
|
| 203 |
-
"passed": False,
|
| 204 |
-
"reason": "",
|
| 205 |
-
"avg_volume": 0.0,
|
| 206 |
-
"data_days": 0,
|
| 207 |
-
"last_price": 0.0,
|
| 208 |
-
"checked_at": datetime.now(timezone.utc).isoformat(),
|
| 209 |
-
}
|
| 210 |
-
|
| 211 |
-
try:
|
| 212 |
-
df = yf.download(symbol, period="2y", progress=False, auto_adjust=True)
|
| 213 |
-
if df is None or df.empty:
|
| 214 |
-
result["reason"] = "no_data"
|
| 215 |
-
return result
|
| 216 |
-
|
| 217 |
-
# Newer yfinance (>=0.2.x) returns multi-level columns for single-ticker downloads.
|
| 218 |
-
# Flatten them so df["Volume"] / df["Close"] return plain Series.
|
| 219 |
-
if hasattr(df.columns, "nlevels") and df.columns.nlevels > 1:
|
| 220 |
-
df.columns = df.columns.get_level_values(0)
|
| 221 |
-
|
| 222 |
-
data_days = len(df)
|
| 223 |
-
result["data_days"] = data_days
|
| 224 |
-
|
| 225 |
-
if data_days < MIN_DATA_DAYS:
|
| 226 |
-
result["reason"] = f"insufficient_data ({data_days} < {MIN_DATA_DAYS} days)"
|
| 227 |
-
return result
|
| 228 |
-
|
| 229 |
-
try:
|
| 230 |
-
avg_vol = float(df["Volume"].mean()) if "Volume" in df.columns else 0.0
|
| 231 |
-
except (TypeError, ValueError):
|
| 232 |
-
avg_vol = 0.0
|
| 233 |
-
result["avg_volume"] = round(avg_vol, 0)
|
| 234 |
-
|
| 235 |
-
if avg_vol < MIN_AVG_VOLUME:
|
| 236 |
-
result["reason"] = f"low_volume ({avg_vol:,.0f} < {MIN_AVG_VOLUME:,.0f})"
|
| 237 |
-
return result
|
| 238 |
-
|
| 239 |
-
try:
|
| 240 |
-
last_price = float(df["Close"].iloc[-1])
|
| 241 |
-
except (TypeError, ValueError, IndexError):
|
| 242 |
-
last_price = 0.0
|
| 243 |
-
result["last_price"] = round(last_price, 2)
|
| 244 |
-
|
| 245 |
-
if last_price < MIN_PRICE:
|
| 246 |
-
result["reason"] = f"low_price (price={last_price:.2f} < {MIN_PRICE})"
|
| 247 |
-
return result
|
| 248 |
|
| 249 |
-
|
| 250 |
-
|
| 251 |
-
|
| 252 |
-
|
| 253 |
-
|
| 254 |
-
|
| 255 |
-
|
| 256 |
-
|
| 257 |
-
|
| 258 |
-
|
| 259 |
-
# Stage 2: ML Backtest Eligibility
|
| 260 |
-
# ---------------------------------------------------------------------------
|
| 261 |
-
def stage2_backtest(symbol: str) -> Dict[str, Any]:
|
| 262 |
-
"""Full walk-forward backtest to evaluate ML eligibility for US stock."""
|
| 263 |
-
from analysis.walk_forward_backtest import walk_forward_backtest
|
| 264 |
-
|
| 265 |
-
result: Dict[str, Any] = {
|
| 266 |
-
"symbol": symbol,
|
| 267 |
-
"eligible": False,
|
| 268 |
-
"reason": "",
|
| 269 |
-
"evaluated_at": datetime.now(timezone.utc).isoformat(),
|
| 270 |
-
}
|
| 271 |
-
|
| 272 |
-
try:
|
| 273 |
-
_, m = walk_forward_backtest(
|
| 274 |
-
symbol=symbol,
|
| 275 |
-
start_date=BACKTEST_START,
|
| 276 |
-
end_date=BACKTEST_END,
|
| 277 |
-
market_id="us",
|
| 278 |
-
days_ahead=DAYS_AHEAD,
|
| 279 |
-
train_window=TRAIN_WINDOW,
|
| 280 |
-
model_type="rf",
|
| 281 |
-
use_technical_gate=True,
|
| 282 |
-
initial_capital=100_000.0,
|
| 283 |
-
commission_bps=5.0, # Lower US commission
|
| 284 |
-
slippage_bps=5.0,
|
| 285 |
-
exit_rule="signal_or_fixed",
|
| 286 |
-
max_hold_days=DAYS_AHEAD,
|
| 287 |
-
stop_loss_pct=0.05,
|
| 288 |
-
take_profit_pct=0.10,
|
| 289 |
-
trailing_stop_pct=0.07,
|
| 290 |
-
max_position_pct=0.50,
|
| 291 |
-
max_risk_per_trade_pct=0.02,
|
| 292 |
-
)
|
| 293 |
-
|
| 294 |
-
dir_acc = m["direction_accuracy"]
|
| 295 |
-
sharpe = m["sharpe"]
|
| 296 |
-
hit_rate = m.get("hit_rate_pct", 0.0)
|
| 297 |
-
total_ret = m["total_return_pct"]
|
| 298 |
-
trades = m["trades_count"]
|
| 299 |
-
max_dd = m.get("max_drawdown_pct", 0.0)
|
| 300 |
-
|
| 301 |
-
result.update({
|
| 302 |
-
"dir_acc": round(dir_acc, 4),
|
| 303 |
-
"sharpe": round(sharpe, 3),
|
| 304 |
-
"hit_rate": round(hit_rate, 1),
|
| 305 |
-
"total_return_pct": round(total_ret, 1),
|
| 306 |
-
"trades": trades,
|
| 307 |
-
"max_drawdown_pct": round(max_dd, 1),
|
| 308 |
-
})
|
| 309 |
-
|
| 310 |
-
reasons: List[str] = []
|
| 311 |
-
if dir_acc < MIN_DIR_ACC:
|
| 312 |
-
reasons.append(f"dir_acc={dir_acc:.1%}<{MIN_DIR_ACC:.0%}")
|
| 313 |
-
if sharpe < MIN_SHARPE:
|
| 314 |
-
reasons.append(f"sharpe={sharpe:.3f}<{MIN_SHARPE}")
|
| 315 |
-
if hit_rate < MIN_HIT_RATE:
|
| 316 |
-
reasons.append(f"hit_rate={hit_rate:.1f}%<{MIN_HIT_RATE}%")
|
| 317 |
-
if trades < MIN_TRADES:
|
| 318 |
-
reasons.append(f"trades={trades}<{MIN_TRADES}")
|
| 319 |
-
|
| 320 |
-
result["eligible"] = len(reasons) == 0
|
| 321 |
-
result["reason"] = "; ".join(reasons) if reasons else "OK"
|
| 322 |
-
return result
|
| 323 |
-
|
| 324 |
-
except Exception as e:
|
| 325 |
-
result["reason"] = f"backtest_error: {e}"
|
| 326 |
-
logger.warning("Stage 2 failed for %s: %s", symbol, e)
|
| 327 |
-
return result
|
| 328 |
-
|
| 329 |
-
|
| 330 |
-
# ---------------------------------------------------------------------------
|
| 331 |
-
# Main scan orchestrator
|
| 332 |
-
# ---------------------------------------------------------------------------
|
| 333 |
-
def run_scan(
|
| 334 |
-
universe: str = "sp100",
|
| 335 |
-
force: bool = False,
|
| 336 |
-
stage1_only: bool = False,
|
| 337 |
-
) -> Dict[str, Any]:
|
| 338 |
-
"""Run the 2-stage US scan on the given universe.
|
| 339 |
-
|
| 340 |
-
This function is called from app.py background scheduler and the scan action endpoint.
|
| 341 |
-
"""
|
| 342 |
-
symbols = get_universe(universe)
|
| 343 |
-
if not symbols:
|
| 344 |
-
logger.error("No symbols found for universe: %s", universe)
|
| 345 |
-
return {}
|
| 346 |
-
|
| 347 |
-
data = _load_results()
|
| 348 |
-
|
| 349 |
-
is_same_universe = data.get("universe") == universe
|
| 350 |
-
if not is_same_universe or force:
|
| 351 |
-
logger.info("Starting fresh US scan for %s (%d stocks)", universe, len(symbols))
|
| 352 |
-
data = {
|
| 353 |
-
"scan_started": datetime.now(timezone.utc).isoformat(),
|
| 354 |
-
"universe": universe,
|
| 355 |
-
"market_id": "us",
|
| 356 |
-
"total_stocks": len(symbols),
|
| 357 |
-
"stage1": {},
|
| 358 |
-
"stage2": {},
|
| 359 |
-
"completed": False,
|
| 360 |
-
}
|
| 361 |
-
_save_results(data)
|
| 362 |
-
else:
|
| 363 |
-
s1_done = len(data.get("stage1", {}))
|
| 364 |
-
s2_done = len(data.get("stage2", {}))
|
| 365 |
-
logger.info(
|
| 366 |
-
"Resuming US scan: %d/%d Stage1, %d Stage2 done",
|
| 367 |
-
s1_done, len(symbols), s2_done,
|
| 368 |
-
)
|
| 369 |
-
|
| 370 |
-
# ===== STAGE 1: Liquidity Pre-Filter =====
|
| 371 |
-
logger.info("=" * 60)
|
| 372 |
-
logger.info("STAGE 1: US Liquidity & Data Quality Pre-Filter")
|
| 373 |
-
logger.info("=" * 60)
|
| 374 |
-
|
| 375 |
-
s1_todo = [s for s in symbols if s not in data.get("stage1", {})]
|
| 376 |
-
total_s1 = len(symbols)
|
| 377 |
-
done_s1 = total_s1 - len(s1_todo)
|
| 378 |
-
|
| 379 |
-
for i, sym in enumerate(s1_todo, start=done_s1 + 1):
|
| 380 |
-
t0 = time.time()
|
| 381 |
-
result = stage1_prefilter(sym)
|
| 382 |
-
elapsed = time.time() - t0
|
| 383 |
-
status = "PASS" if result["passed"] else f"FAIL ({result['reason']})"
|
| 384 |
-
logger.info("[Stage1 %d/%d] %s: %s (%.1fs)", i, total_s1, sym, status, elapsed)
|
| 385 |
-
data.setdefault("stage1", {})[sym] = result
|
| 386 |
-
_save_results(data)
|
| 387 |
-
gc.collect()
|
| 388 |
-
|
| 389 |
-
s1_passed = [s for s, v in data["stage1"].items() if v.get("passed")]
|
| 390 |
-
s1_failed = [s for s, v in data["stage1"].items() if not v.get("passed")]
|
| 391 |
-
logger.info("")
|
| 392 |
-
logger.info(
|
| 393 |
-
"Stage 1 Results: %d PASS / %d FAIL out of %d",
|
| 394 |
-
len(s1_passed), len(s1_failed), total_s1,
|
| 395 |
-
)
|
| 396 |
-
logger.info("Passed: %s", ", ".join(sorted(s1_passed)))
|
| 397 |
-
logger.info("")
|
| 398 |
-
|
| 399 |
-
if stage1_only:
|
| 400 |
-
data["completed"] = True
|
| 401 |
-
_save_results(data)
|
| 402 |
-
_publish_final_if_complete(data)
|
| 403 |
-
return data
|
| 404 |
-
|
| 405 |
-
# ===== STAGE 2: ML Backtest =====
|
| 406 |
-
logger.info("=" * 60)
|
| 407 |
-
logger.info("STAGE 2: US ML Walk-Forward Backtest")
|
| 408 |
-
logger.info("=" * 60)
|
| 409 |
-
|
| 410 |
-
s2_todo = [s for s in s1_passed if s not in data.get("stage2", {})]
|
| 411 |
-
total_s2 = len(s1_passed)
|
| 412 |
-
done_s2 = total_s2 - len(s2_todo)
|
| 413 |
-
times_s2: List[float] = []
|
| 414 |
-
|
| 415 |
-
for i, sym in enumerate(s2_todo, start=done_s2 + 1):
|
| 416 |
-
t0 = time.time()
|
| 417 |
-
result = stage2_backtest(sym)
|
| 418 |
-
elapsed = time.time() - t0
|
| 419 |
-
times_s2.append(elapsed)
|
| 420 |
-
|
| 421 |
-
if result["eligible"]:
|
| 422 |
-
status = (
|
| 423 |
-
f"ELIGIBLE (ret={result.get('total_return_pct', 0):.1f}%, "
|
| 424 |
-
f"sharpe={result.get('sharpe', 0):.3f}, "
|
| 425 |
-
f"hit={result.get('hit_rate', 0):.0f}%)"
|
| 426 |
-
)
|
| 427 |
-
else:
|
| 428 |
-
status = f"EXCLUDED ({result['reason']})"
|
| 429 |
-
|
| 430 |
-
avg_time = sum(times_s2) / len(times_s2)
|
| 431 |
-
remaining = total_s2 - i
|
| 432 |
-
eta_min = (remaining * avg_time) / 60
|
| 433 |
-
|
| 434 |
-
logger.info(
|
| 435 |
-
"[Stage2 %d/%d] %s: %s (%.1fs, ETA: %.0f min)",
|
| 436 |
-
i, total_s2, sym, status, elapsed, eta_min,
|
| 437 |
-
)
|
| 438 |
-
|
| 439 |
-
data.setdefault("stage2", {})[sym] = result
|
| 440 |
-
_save_results(data)
|
| 441 |
-
gc.collect()
|
| 442 |
|
| 443 |
-
|
| 444 |
-
|
| 445 |
-
|
| 446 |
-
|
| 447 |
-
"Stage 2 Results: %d ELIGIBLE / %d EXCLUDED out of %d",
|
| 448 |
-
len(eligible), len(excluded), total_s2,
|
| 449 |
)
|
| 450 |
-
|
| 451 |
-
|
| 452 |
-
|
| 453 |
-
data["scan_finished"] = datetime.now(timezone.utc).isoformat()
|
| 454 |
-
data["eligible_count"] = len(eligible)
|
| 455 |
-
data["eligible_symbols"] = sorted(eligible)
|
| 456 |
-
_save_results(data)
|
| 457 |
-
_publish_final_if_complete(data)
|
| 458 |
|
| 459 |
-
|
| 460 |
-
|
|
|
|
| 461 |
|
| 462 |
|
| 463 |
if __name__ == "__main__":
|
| 464 |
-
|
| 465 |
-
parser = argparse.ArgumentParser(description="US equities batch scanner")
|
| 466 |
-
parser.add_argument("--universe", default="sp100", choices=["sp100", "sp500", "nasdaq100"])
|
| 467 |
-
parser.add_argument("--force", action="store_true", help="Force fresh scan")
|
| 468 |
-
parser.add_argument("--stage1-only", action="store_true", help="Run only Stage 1")
|
| 469 |
-
args = parser.parse_args()
|
| 470 |
-
run_scan(universe=args.universe, force=args.force, stage1_only=args.stage1_only)
|
|
|
|
| 1 |
#!/usr/bin/env python3
|
| 2 |
+
"""US equities batch scanner using the shared scanner engine."""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 3 |
|
| 4 |
from __future__ import annotations
|
| 5 |
|
| 6 |
+
import argparse
|
| 7 |
+
from dataclasses import replace
|
| 8 |
import logging
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 9 |
|
| 10 |
+
from trading.scanner_engine import ScanConfig, print_report, run_scan
|
| 11 |
+
|
| 12 |
+
|
| 13 |
logging.basicConfig(
|
| 14 |
level=logging.INFO,
|
| 15 |
format="%(asctime)s [%(levelname)s] %(message)s",
|
|
|
|
| 17 |
)
|
| 18 |
logger = logging.getLogger("us_scan")
|
| 19 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 20 |
|
| 21 |
+
CONFIGS = {
|
| 22 |
+
"sp100": ScanConfig(market_id="us", universe_name="sp100", title="US", min_avg_volume=1_000_000, min_price=5.0),
|
| 23 |
+
"sp500": ScanConfig(market_id="us", universe_name="sp500", title="US", min_avg_volume=1_500_000, min_price=5.0),
|
| 24 |
+
"nasdaq100": ScanConfig(market_id="us", universe_name="nasdaq100", title="US", min_avg_volume=1_000_000, min_price=5.0),
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 25 |
}
|
| 26 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 27 |
|
| 28 |
+
def main() -> int:
|
| 29 |
+
parser = argparse.ArgumentParser(description="US equities batch scanner")
|
| 30 |
+
parser.add_argument("--universe", default="sp100", choices=sorted(CONFIGS))
|
| 31 |
+
parser.add_argument("--symbols", nargs="*", help="Run scan for an explicit symbol subset")
|
| 32 |
+
parser.add_argument("--max-symbols", type=int, help="Limit fetched universe to the first N symbols")
|
| 33 |
+
parser.add_argument("--resume", action="store_true", help="Resume interrupted scan")
|
| 34 |
+
parser.add_argument("--report", action="store_true", help="Show latest report only")
|
| 35 |
+
parser.add_argument("--force", action="store_true", help="Force a fresh scan")
|
| 36 |
+
parser.add_argument("--stage1-only", action="store_true", help="Run only liquidity pre-filter")
|
| 37 |
+
args = parser.parse_args()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 38 |
|
| 39 |
+
config = replace(
|
| 40 |
+
CONFIGS[args.universe],
|
| 41 |
+
symbols=[str(symbol).strip().upper() for symbol in (args.symbols or []) if str(symbol).strip()] or None,
|
| 42 |
+
max_symbols=args.max_symbols,
|
|
|
|
|
|
|
| 43 |
)
|
| 44 |
+
if args.report:
|
| 45 |
+
print_report(config, logger)
|
| 46 |
+
return 0
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 47 |
|
| 48 |
+
force = args.force or not args.resume
|
| 49 |
+
run_scan(config, logger, force=force, stage1_only=args.stage1_only)
|
| 50 |
+
return 0
|
| 51 |
|
| 52 |
|
| 53 |
if __name__ == "__main__":
|
| 54 |
+
raise SystemExit(main())
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
huggingface-space/start.sh
CHANGED
|
@@ -1,95 +1,43 @@
|
|
| 1 |
#!/bin/bash
|
| 2 |
set -e
|
| 3 |
|
| 4 |
-
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
| 5 |
-
APP_ROOT="${APP_ROOT:-$SCRIPT_DIR}"
|
| 6 |
-
PERSIST_ROOT="${PERSIST_ROOT:-/data}"
|
| 7 |
-
PYTHON_BIN="${PYTHON_BIN:-python3}"
|
| 8 |
-
|
| 9 |
-
cd "$APP_ROOT"
|
| 10 |
-
|
| 11 |
echo "[start.sh] Başlatılıyor..."
|
| 12 |
|
| 13 |
-
#
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
if mkdir -p "$PERSIST_ROOT/paper_trading/auto_trader" \
|
| 17 |
-
"$PERSIST_ROOT/paper_trading/markets/us/auto_trader" \
|
| 18 |
-
"$PERSIST_ROOT/paper_trading/markets/us" \
|
| 19 |
-
"$PERSIST_ROOT/paper_trading/journal" \
|
| 20 |
-
"$PERSIST_ROOT/paper_trading/incidents" \
|
| 21 |
-
"$PERSIST_ROOT/logs"; then
|
| 22 |
-
if [ -d "$APP_ROOT/paper_trading" ] && [ ! -L "$APP_ROOT/paper_trading" ]; then
|
| 23 |
-
rm -rf "$APP_ROOT/paper_trading"
|
| 24 |
-
fi
|
| 25 |
-
if [ -d "$APP_ROOT/logs" ] && [ ! -L "$APP_ROOT/logs" ]; then
|
| 26 |
-
rm -rf "$APP_ROOT/logs"
|
| 27 |
-
fi
|
| 28 |
-
ln -snf "$PERSIST_ROOT/paper_trading" "$APP_ROOT/paper_trading"
|
| 29 |
-
ln -snf "$PERSIST_ROOT/logs" "$APP_ROOT/logs"
|
| 30 |
-
echo "[start.sh] Persistent storage aktif: $PERSIST_ROOT/paper_trading → $APP_ROOT/paper_trading"
|
| 31 |
-
else
|
| 32 |
-
echo "[start.sh] UYARI: $PERSIST_ROOT yazılabilir değil, local klasörlerle devam ediliyor."
|
| 33 |
-
mkdir -p "$APP_ROOT/paper_trading/auto_trader" \
|
| 34 |
-
"$APP_ROOT/paper_trading/markets/us/auto_trader" \
|
| 35 |
-
"$APP_ROOT/paper_trading/markets/us" \
|
| 36 |
-
"$APP_ROOT/paper_trading/journal" \
|
| 37 |
-
"$APP_ROOT/paper_trading/incidents" \
|
| 38 |
-
"$APP_ROOT/logs"
|
| 39 |
-
fi
|
| 40 |
|
| 41 |
-
#
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
TELEGRAM_IP=$($PYTHON_BIN -c "
|
| 45 |
-
import urllib.request, json, ssl
|
| 46 |
-
ctx = ssl.create_default_context()
|
| 47 |
-
req = urllib.request.Request(
|
| 48 |
-
'https://1.1.1.1/dns-query?name=api.telegram.org&type=A',
|
| 49 |
-
headers={'Accept': 'application/dns-json'}
|
| 50 |
-
)
|
| 51 |
-
try:
|
| 52 |
-
resp = urllib.request.urlopen(req, timeout=10, context=ctx)
|
| 53 |
-
data = json.loads(resp.read())
|
| 54 |
-
for ans in data.get('Answer', []):
|
| 55 |
-
if ans.get('type') == 1:
|
| 56 |
-
print(ans['data'])
|
| 57 |
-
break
|
| 58 |
-
except Exception as e:
|
| 59 |
-
print('')
|
| 60 |
-
" 2>/dev/null)
|
| 61 |
-
if [ -z "$TELEGRAM_IP" ]; then
|
| 62 |
-
TELEGRAM_IP="149.154.167.220"
|
| 63 |
-
echo "[start.sh] DoH failed — fallback IP kullanılıyor: $TELEGRAM_IP"
|
| 64 |
-
fi
|
| 65 |
-
if [ -w /etc/hosts ]; then
|
| 66 |
-
if ! grep -q "api.telegram.org" /etc/hosts; then
|
| 67 |
-
echo "$TELEGRAM_IP api.telegram.org" >> /etc/hosts
|
| 68 |
-
fi
|
| 69 |
-
echo "[start.sh] /etc/hosts'a eklendi: $TELEGRAM_IP api.telegram.org"
|
| 70 |
-
else
|
| 71 |
-
echo "[start.sh] UYARI: /etc/hosts yazılamıyor, DNS fix atlandı."
|
| 72 |
-
fi
|
| 73 |
|
| 74 |
-
#
|
|
|
|
|
|
|
| 75 |
|
| 76 |
-
# Trading
|
| 77 |
-
|
| 78 |
-
|
| 79 |
-
|
| 80 |
-
|
| 81 |
-
|
| 82 |
-
|
| 83 |
-
|
| 84 |
-
|
| 85 |
-
|
| 86 |
-
|
| 87 |
-
echo "[start.sh] ${market} worker wrapper PID=$!"
|
| 88 |
-
}
|
| 89 |
|
| 90 |
-
|
| 91 |
-
|
| 92 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 93 |
|
| 94 |
# Telegram bot — token varsa başlat (7/24 çalışır, crash olursa restart)
|
| 95 |
if [ -n "$TELEGRAM_BOT_TOKEN" ] && [ -n "$TELEGRAM_CHAT_ID" ]; then
|
|
@@ -97,7 +45,7 @@ if [ -n "$TELEGRAM_BOT_TOKEN" ] && [ -n "$TELEGRAM_CHAT_ID" ]; then
|
|
| 97 |
(
|
| 98 |
while true; do
|
| 99 |
echo "[telegram-bot-wrapper] Bot başlatılıyor..."
|
| 100 |
-
|
| 101 |
echo "[telegram-bot-wrapper] Bot durdu, 5 saniye sonra restart..."
|
| 102 |
sleep 5
|
| 103 |
done
|
|
@@ -107,27 +55,6 @@ else
|
|
| 107 |
echo "[start.sh] TELEGRAM_BOT_TOKEN/CHAT_ID eksik — bot atlandı."
|
| 108 |
fi
|
| 109 |
|
| 110 |
-
# ─── Keepalive: HF Space'in uykuya dalmasını önle ───
|
| 111 |
-
# HF free tier, gelen HTTP trafiği olmazsa Space'i uyutur.
|
| 112 |
-
# Bu loop kendi dış URL'sine ping atarak "aktif" tutar.
|
| 113 |
-
(
|
| 114 |
-
sleep 180 # uvicorn'un ayağa kalkmasını bekle
|
| 115 |
-
SPACE_URL="${SPACE_HOST:-https://veteroner-borsa.hf.space}"
|
| 116 |
-
echo "[keepalive] Başlatıldı — her 5 dk $SPACE_URL ping"
|
| 117 |
-
while true; do
|
| 118 |
-
sleep 300
|
| 119 |
-
$PYTHON_BIN -c "
|
| 120 |
-
import urllib.request
|
| 121 |
-
try:
|
| 122 |
-
urllib.request.urlopen('${SPACE_URL}/', timeout=15)
|
| 123 |
-
print('[keepalive] ping OK')
|
| 124 |
-
except Exception as e:
|
| 125 |
-
print(f'[keepalive] ping failed: {e}')
|
| 126 |
-
" 2>&1 || true
|
| 127 |
-
done
|
| 128 |
-
) &
|
| 129 |
-
echo "[start.sh] Keepalive wrapper PID=$!"
|
| 130 |
-
|
| 131 |
# uvicorn ön planda (HF Spaces 7860'ı izler)
|
| 132 |
echo "[start.sh] uvicorn başlatılıyor (port 7860)..."
|
| 133 |
-
exec
|
|
|
|
| 1 |
#!/bin/bash
|
| 2 |
set -e
|
| 3 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 4 |
echo "[start.sh] Başlatılıyor..."
|
| 5 |
|
| 6 |
+
# Stale Python bytecode temizle (eski .pyc syntax hatalarını önler)
|
| 7 |
+
find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true
|
| 8 |
+
echo "[start.sh] __pycache__ temizlendi"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 9 |
|
| 10 |
+
# Gerekli dizinleri oluştur
|
| 11 |
+
mkdir -p paper_trading/auto_trader paper_trading/journal paper_trading/incidents logs
|
| 12 |
+
mkdir -p paper_trading_us/auto_trader paper_trading_us/journal paper_trading_us/incidents
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13 |
|
| 14 |
+
# Boot scan ownership tek yerde olmalı.
|
| 15 |
+
# app.py içindeki scheduler eksik/stale sonuçta tek bir boot scan başlatır.
|
| 16 |
+
echo "[start.sh] Boot scan scheduler tarafından yönetilecek"
|
| 17 |
|
| 18 |
+
# Trading worker BIST — crash olursa otomatik restart (daemon mode)
|
| 19 |
+
echo "[start.sh] BIST Trading worker başlatılıyor..."
|
| 20 |
+
(
|
| 21 |
+
while true; do
|
| 22 |
+
echo "[worker-bist] Worker başlatılıyor..."
|
| 23 |
+
python -m trading.worker --daemon --mode paper --interval 3600 --market bist 2>&1 || true
|
| 24 |
+
echo "[worker-bist] Worker durdu, 30 saniye sonra restart..."
|
| 25 |
+
sleep 30
|
| 26 |
+
done
|
| 27 |
+
) &
|
| 28 |
+
echo "[start.sh] BIST worker wrapper PID=$!"
|
|
|
|
|
|
|
| 29 |
|
| 30 |
+
# Trading worker US — crash olursa otomatik restart (daemon mode)
|
| 31 |
+
echo "[start.sh] US Trading worker başlatılıyor..."
|
| 32 |
+
(
|
| 33 |
+
while true; do
|
| 34 |
+
echo "[worker-us] Worker başlatılıyor..."
|
| 35 |
+
python -m trading.worker --daemon --mode paper --interval 3600 --market us 2>&1 || true
|
| 36 |
+
echo "[worker-us] Worker durdu, 30 saniye sonra restart..."
|
| 37 |
+
sleep 30
|
| 38 |
+
done
|
| 39 |
+
) &
|
| 40 |
+
echo "[start.sh] US worker wrapper PID=$!"
|
| 41 |
|
| 42 |
# Telegram bot — token varsa başlat (7/24 çalışır, crash olursa restart)
|
| 43 |
if [ -n "$TELEGRAM_BOT_TOKEN" ] && [ -n "$TELEGRAM_CHAT_ID" ]; then
|
|
|
|
| 45 |
(
|
| 46 |
while true; do
|
| 47 |
echo "[telegram-bot-wrapper] Bot başlatılıyor..."
|
| 48 |
+
python telegram_bot.py 2>&1 || true
|
| 49 |
echo "[telegram-bot-wrapper] Bot durdu, 5 saniye sonra restart..."
|
| 50 |
sleep 5
|
| 51 |
done
|
|
|
|
| 55 |
echo "[start.sh] TELEGRAM_BOT_TOKEN/CHAT_ID eksik — bot atlandı."
|
| 56 |
fi
|
| 57 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 58 |
# uvicorn ön planda (HF Spaces 7860'ı izler)
|
| 59 |
echo "[start.sh] uvicorn başlatılıyor (port 7860)..."
|
| 60 |
+
exec uvicorn app:app --host 0.0.0.0 --port 7860
|
huggingface-space/sync_eligibility.py
CHANGED
|
@@ -1,33 +1,53 @@
|
|
| 1 |
#!/usr/bin/env python3
|
| 2 |
"""Sync BIST100 scan results into stock_eligibility.json."""
|
|
|
|
| 3 |
import json
|
| 4 |
from datetime import datetime, timezone
|
|
|
|
| 5 |
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
'
|
| 21 |
-
'
|
| 22 |
-
'
|
| 23 |
-
'
|
| 24 |
-
'reason': v.get('reason', 'unknown'),
|
| 25 |
-
'evaluated_at': v.get('evaluated_at', ''),
|
| 26 |
}
|
| 27 |
|
| 28 |
-
|
| 29 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 30 |
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
print(f'Eligible: {len(elig)} - {sorted(elig)}')
|
|
|
|
| 1 |
#!/usr/bin/env python3
|
| 2 |
"""Sync BIST100 scan results into stock_eligibility.json."""
|
| 3 |
+
import argparse
|
| 4 |
import json
|
| 5 |
from datetime import datetime, timezone
|
| 6 |
+
from pathlib import Path
|
| 7 |
|
| 8 |
+
from trading.market_registry import DEFAULT_MARKET_ID, get_eligibility_path, get_scan_results_path
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def main() -> int:
|
| 12 |
+
parser = argparse.ArgumentParser(description="Sync scan results into eligibility cache")
|
| 13 |
+
parser.add_argument("--market", default=DEFAULT_MARKET_ID, choices=["bist", "us"])
|
| 14 |
+
parser.add_argument("--scan-file")
|
| 15 |
+
args = parser.parse_args()
|
| 16 |
+
|
| 17 |
+
scan_file = Path(args.scan_file) if args.scan_file else get_scan_results_path(args.market, completed=True)
|
| 18 |
+
scan = json.loads(scan_file.read_text())
|
| 19 |
+
s2 = scan.get('stage2', {})
|
| 20 |
+
|
| 21 |
+
eligibility = {
|
| 22 |
+
'market_id': args.market,
|
| 23 |
+
'stocks': {},
|
| 24 |
+
'last_refresh': datetime.now(timezone.utc).isoformat(),
|
| 25 |
+
'source': f'{args.market}_scan',
|
|
|
|
|
|
|
| 26 |
}
|
| 27 |
|
| 28 |
+
for sym, v in s2.items():
|
| 29 |
+
eligibility['stocks'][sym] = {
|
| 30 |
+
'market_id': args.market,
|
| 31 |
+
'symbol': sym,
|
| 32 |
+
'eligible': v.get('eligible', False),
|
| 33 |
+
'dir_acc': v.get('dir_acc', 0),
|
| 34 |
+
'sharpe': v.get('sharpe', 0),
|
| 35 |
+
'hit_rate': v.get('hit_rate', 0),
|
| 36 |
+
'total_return_pct': v.get('total_return_pct', 0),
|
| 37 |
+
'trades': v.get('trades', 0),
|
| 38 |
+
'reason': v.get('reason', 'unknown'),
|
| 39 |
+
'evaluated_at': v.get('evaluated_at', ''),
|
| 40 |
+
}
|
| 41 |
+
|
| 42 |
+
out_file = get_eligibility_path(args.market)
|
| 43 |
+
out_file.parent.mkdir(parents=True, exist_ok=True)
|
| 44 |
+
out_file.write_text(json.dumps(eligibility, indent=2, default=str))
|
| 45 |
+
|
| 46 |
+
elig = [s for s, v in eligibility['stocks'].items() if v['eligible']]
|
| 47 |
+
print(f'Updated {out_file} with {len(eligibility["stocks"])} stocks')
|
| 48 |
+
print(f'Eligible: {len(elig)} - {sorted(elig)}')
|
| 49 |
+
return 0
|
| 50 |
+
|
| 51 |
|
| 52 |
+
if __name__ == '__main__':
|
| 53 |
+
raise SystemExit(main())
|
|
|
huggingface-space/telegram_bot.py
CHANGED
|
@@ -1,52 +1,31 @@
|
|
| 1 |
-
"""
|
| 2 |
|
| 3 |
-
|
| 4 |
-
otomatik bildirimler gönderir (alım, satım, tarama, sinyal, hata).
|
| 5 |
|
| 6 |
Kullanım:
|
| 7 |
python telegram_bot.py # Foreground (test)
|
| 8 |
python telegram_bot.py & # Arka planda
|
| 9 |
|
| 10 |
-
|
| 11 |
-
/durum — Sistem durumu
|
| 12 |
-
/portfoy — Portföy
|
| 13 |
-
/hisseler — Eligible hisse listesi
|
| 14 |
/sinyaller — Bugünkü BUY/SELL sinyalleri
|
| 15 |
-
/hisse SYM — Tek hisse detaylı analiz (canlı göstergeler)
|
| 16 |
-
/islemler — Son işlemler (alım/satım geçmişi)
|
| 17 |
-
/tarama — Tarama durumu ve ilerlemesi
|
| 18 |
/yardim — Komut listesi
|
| 19 |
-
|
| 20 |
-
Proaktif bildirimler (worker tarafından çağrılır):
|
| 21 |
-
notify_scan_started()
|
| 22 |
-
notify_scan_complete(results)
|
| 23 |
-
notify_signals_generated(signals_summary)
|
| 24 |
-
notify_trade_executed(trade)
|
| 25 |
-
notify_cycle_complete(result)
|
| 26 |
-
notify_error(msg)
|
| 27 |
"""
|
| 28 |
from __future__ import annotations
|
| 29 |
|
| 30 |
-
# --- HF Space DNS fix: api.telegram.org DNS çözümü engellenmiş ---
|
| 31 |
-
import socket as _socket
|
| 32 |
-
_orig_getaddrinfo = _socket.getaddrinfo
|
| 33 |
-
def _tg_dns_fix(host, port, family=0, type=0, proto=0, flags=0):
|
| 34 |
-
if host == "api.telegram.org":
|
| 35 |
-
return _orig_getaddrinfo("149.154.167.220", port, family, type, proto, flags)
|
| 36 |
-
return _orig_getaddrinfo(host, port, family, type, proto, flags)
|
| 37 |
-
_socket.getaddrinfo = _tg_dns_fix
|
| 38 |
-
|
| 39 |
import json
|
| 40 |
import logging
|
| 41 |
import os
|
|
|
|
| 42 |
import sys
|
| 43 |
import time
|
| 44 |
-
import
|
| 45 |
import urllib.request
|
| 46 |
import urllib.error
|
| 47 |
from datetime import datetime, timezone
|
| 48 |
from pathlib import Path
|
| 49 |
-
from typing import Any, Dict, List, Optional
|
| 50 |
|
| 51 |
logging.basicConfig(
|
| 52 |
level=logging.INFO,
|
|
@@ -58,6 +37,7 @@ BOT_TOKEN = os.environ.get("TELEGRAM_BOT_TOKEN", "")
|
|
| 58 |
CHAT_ID = os.environ.get("TELEGRAM_CHAT_ID", "")
|
| 59 |
|
| 60 |
if not BOT_TOKEN or not CHAT_ID:
|
|
|
|
| 61 |
env_file = Path(__file__).parent / ".env"
|
| 62 |
if env_file.exists():
|
| 63 |
for line in env_file.read_text().splitlines():
|
|
@@ -71,113 +51,130 @@ if not BOT_TOKEN:
|
|
| 71 |
print("HATA: TELEGRAM_BOT_TOKEN bulunamadı. .env dosyasını kontrol edin.")
|
| 72 |
sys.exit(1)
|
| 73 |
|
| 74 |
-
# HF Spaces
|
| 75 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 76 |
_PROXY_URL = os.environ.get(
|
| 77 |
"TELEGRAM_PROXY_URL",
|
| 78 |
-
"https://
|
| 79 |
)
|
| 80 |
-
_DIRECT_BASE = f"https://api.telegram.org/bot{BOT_TOKEN}"
|
| 81 |
if _PROXY_URL:
|
| 82 |
-
|
| 83 |
-
|
| 84 |
-
|
| 85 |
-
|
| 86 |
-
|
| 87 |
-
|
| 88 |
-
|
| 89 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 90 |
|
| 91 |
-
# paper_trading dizini
|
| 92 |
_SCRIPT_DIR = Path(__file__).parent
|
| 93 |
-
_PAPER_DIR
|
| 94 |
_SCRIPT_DIR / "paper_trading"
|
| 95 |
if (_SCRIPT_DIR / "paper_trading").exists()
|
| 96 |
else _SCRIPT_DIR.parent / "paper_trading"
|
| 97 |
)
|
| 98 |
|
| 99 |
_STATUS_FILE = _PAPER_DIR / "auto_trader" / "status.json"
|
| 100 |
-
_STATE_FILE
|
| 101 |
-
_SCAN_FILE
|
| 102 |
-
_DB_PATH
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 103 |
|
| 104 |
|
| 105 |
-
#
|
| 106 |
# Telegram API helpers
|
| 107 |
-
#
|
| 108 |
|
| 109 |
-
|
| 110 |
-
|
| 111 |
-
|
| 112 |
-
|
| 113 |
-
)
|
|
|
|
|
|
|
|
|
|
|
|
|
| 114 |
|
| 115 |
-
def _api(method: str, payload: dict) -> dict:
|
| 116 |
-
"""Telegram API çağrısı. Proxy başarısız olursa doğrudan API'ye düşer."""
|
| 117 |
-
global _proxy_fail_count, _proxy_disabled
|
| 118 |
-
data = json.dumps(payload).encode()
|
| 119 |
-
sock_timeout = 30 if "getUpdates" in method else 15
|
| 120 |
|
| 121 |
-
|
| 122 |
-
|
| 123 |
-
|
| 124 |
-
|
| 125 |
-
|
| 126 |
-
|
| 127 |
-
|
| 128 |
-
|
| 129 |
-
|
| 130 |
-
|
| 131 |
-
|
| 132 |
-
|
| 133 |
-
logger.info("Proxy tekrar aktif, hata sayacı sıfırlandı")
|
| 134 |
-
_proxy_fail_count = 0
|
| 135 |
-
return result
|
| 136 |
-
except urllib.error.HTTPError as e:
|
| 137 |
-
if e.code in (402, 403, 503):
|
| 138 |
-
_proxy_fail_count += 1
|
| 139 |
-
logger.warning(
|
| 140 |
-
"Proxy HTTP %d (%s) — doğrudan API'ye geçiliyor (hata #%d)",
|
| 141 |
-
e.code, e.reason, _proxy_fail_count,
|
| 142 |
-
)
|
| 143 |
-
if _proxy_fail_count >= 3:
|
| 144 |
-
_proxy_disabled = True
|
| 145 |
-
logger.error(
|
| 146 |
-
"Proxy %d kez başarısız — kalıcı olarak devre dışı bırakıldı. "
|
| 147 |
-
"Vercel proxy'sini yeniden deploy edin: TELEGRAM_PROXY_URL env'i güncelleyin.",
|
| 148 |
-
_proxy_fail_count,
|
| 149 |
-
)
|
| 150 |
-
else:
|
| 151 |
-
logger.debug("Proxy HTTP %d — doğrudan API deneniyor", e.code)
|
| 152 |
-
except Exception as e:
|
| 153 |
-
logger.debug("Proxy hatası (%s) — doğrudan API deneniyor: %s", type(e).__name__, e)
|
| 154 |
|
| 155 |
-
|
| 156 |
-
|
|
|
|
| 157 |
req = urllib.request.Request(
|
| 158 |
-
|
|
|
|
| 159 |
headers={"Content-Type": "application/json"},
|
| 160 |
)
|
| 161 |
-
|
| 162 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 163 |
|
| 164 |
|
| 165 |
-
def send(text: str, parse_mode: str = "HTML") ->
|
| 166 |
-
|
| 167 |
-
|
| 168 |
-
|
| 169 |
-
|
| 170 |
-
|
| 171 |
-
|
| 172 |
-
|
| 173 |
-
|
| 174 |
-
"text": chunk,
|
| 175 |
-
"parse_mode": parse_mode,
|
| 176 |
-
})
|
| 177 |
-
except Exception as e:
|
| 178 |
-
logger.error("Send failed: %s", e)
|
| 179 |
-
ok = False
|
| 180 |
-
return ok
|
| 181 |
|
| 182 |
|
| 183 |
def get_updates(offset: int) -> list:
|
|
@@ -189,16 +186,16 @@ def get_updates(offset: int) -> list:
|
|
| 189 |
})
|
| 190 |
return result.get("result", [])
|
| 191 |
except urllib.error.URLError as e:
|
| 192 |
-
logger.
|
| 193 |
return []
|
| 194 |
except Exception as e:
|
| 195 |
-
logger.
|
| 196 |
return []
|
| 197 |
|
| 198 |
|
| 199 |
-
#
|
| 200 |
-
#
|
| 201 |
-
#
|
| 202 |
|
| 203 |
def _read_json(path: Path) -> dict:
|
| 204 |
try:
|
|
@@ -209,150 +206,144 @@ def _read_json(path: Path) -> dict:
|
|
| 209 |
return {}
|
| 210 |
|
| 211 |
|
| 212 |
-
def
|
| 213 |
-
"""SQLite sorgusu çalıştır, dict listesi döndür."""
|
| 214 |
try:
|
| 215 |
import sqlite3
|
| 216 |
if not _DB_PATH.exists():
|
| 217 |
return []
|
| 218 |
conn = sqlite3.connect(str(_DB_PATH))
|
| 219 |
conn.row_factory = sqlite3.Row
|
| 220 |
-
rows = conn.execute(
|
|
|
|
|
|
|
|
|
|
| 221 |
conn.close()
|
| 222 |
return [dict(r) for r in rows]
|
| 223 |
-
except Exception
|
| 224 |
-
logger.debug("DB query error: %s", e)
|
| 225 |
return []
|
| 226 |
|
| 227 |
|
| 228 |
-
def
|
| 229 |
-
|
| 230 |
-
|
| 231 |
-
|
| 232 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 233 |
|
| 234 |
|
| 235 |
-
def
|
| 236 |
-
|
| 237 |
-
|
| 238 |
-
(
|
| 239 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 240 |
|
| 241 |
|
| 242 |
def _db_get_portfolio() -> dict:
|
|
|
|
| 243 |
state = _read_json(_STATE_FILE)
|
|
|
|
|
|
|
| 244 |
return {
|
| 245 |
-
"cash": state.get("broker_cash",
|
| 246 |
"positions": state.get("broker_positions", {}),
|
| 247 |
}
|
| 248 |
|
| 249 |
|
| 250 |
def _is_worker_running() -> bool:
|
| 251 |
-
pid_file =
|
| 252 |
if not pid_file.exists():
|
| 253 |
return False
|
| 254 |
try:
|
| 255 |
pid = int(pid_file.read_text().strip())
|
| 256 |
-
os
|
|
|
|
| 257 |
return True
|
| 258 |
except Exception:
|
| 259 |
return False
|
| 260 |
|
| 261 |
|
| 262 |
-
|
| 263 |
-
|
| 264 |
-
|
| 265 |
-
from data.stock_data_api import get_stock_data_for_api
|
| 266 |
-
ticker = symbol if symbol.endswith(".IS") else f"{symbol}.IS"
|
| 267 |
-
df = get_stock_data_for_api(ticker, period="5d", interval="1d")
|
| 268 |
-
if df is not None and not df.empty:
|
| 269 |
-
return float(df["Close"].iloc[-1])
|
| 270 |
-
except Exception:
|
| 271 |
-
pass
|
| 272 |
-
return None
|
| 273 |
-
|
| 274 |
-
|
| 275 |
-
def _get_live_indicators(symbol: str) -> Optional[Dict[str, Any]]:
|
| 276 |
-
"""Tek hisse için canlı analiz çalıştır."""
|
| 277 |
-
try:
|
| 278 |
-
from data.stock_data_api import get_stock_data_for_api
|
| 279 |
-
from analysis.scan_signals_api import compute_scan_signals_for_df
|
| 280 |
-
|
| 281 |
-
ticker = symbol if symbol.endswith(".IS") else f"{symbol}.IS"
|
| 282 |
-
df = get_stock_data_for_api(ticker, period="1y", interval="1d")
|
| 283 |
-
if df is None or df.empty:
|
| 284 |
-
return None
|
| 285 |
-
result = compute_scan_signals_for_df(symbol.replace(".IS", ""), df)
|
| 286 |
-
return result.to_dict()
|
| 287 |
-
except Exception as e:
|
| 288 |
-
logger.debug("Live indicator error for %s: %s", symbol, e)
|
| 289 |
-
return None
|
| 290 |
-
|
| 291 |
-
|
| 292 |
-
# ═══════════════════════════════════════════════════════════════════════════
|
| 293 |
-
# Komut işleyicileri
|
| 294 |
-
# ═══════════════════════════════════════════════════════════════════════════
|
| 295 |
|
| 296 |
def cmd_durum() -> str:
|
| 297 |
status = _read_json(_STATUS_FILE)
|
| 298 |
-
state
|
|
|
|
|
|
|
| 299 |
|
| 300 |
worker_running = _is_worker_running()
|
| 301 |
is_running = status.get("is_running", False)
|
| 302 |
-
mode = status.get("mode"
|
| 303 |
last_run = state.get("last_run_date", "—")
|
| 304 |
total_trades = state.get("total_trades", 0)
|
| 305 |
total_days = state.get("total_days_run", 0)
|
| 306 |
-
phase = status.get("current_phase", "—")
|
| 307 |
-
cash = state.get("broker_cash",
|
| 308 |
-
|
| 309 |
-
|
| 310 |
-
updated = updated[:16].replace("T", " ")
|
| 311 |
-
|
| 312 |
-
# Kill switch
|
| 313 |
-
ks_icon = "✅ Kapalı"
|
| 314 |
ks_file = _PAPER_DIR / "kill_switch.json"
|
| 315 |
-
if not ks_file.exists():
|
| 316 |
-
ks_file = Path("paper_trading/kill_switch.json")
|
| 317 |
if ks_file.exists():
|
| 318 |
try:
|
| 319 |
ks = json.loads(ks_file.read_text())
|
| 320 |
if ks.get("active"):
|
| 321 |
-
ks_icon = f"🔴 AKTİF — {ks.get('reason',
|
| 322 |
except Exception:
|
| 323 |
pass
|
| 324 |
|
| 325 |
-
# Scan info
|
| 326 |
-
scan = _read_json(_SCAN_FILE)
|
| 327 |
-
scan_status = "✅ Tamamlandı" if scan.get("completed") else "🔄 Devam ediyor" if scan else "❌ Yok"
|
| 328 |
-
scan_date = scan.get("scan_date", "—")[:10] if scan else "—"
|
| 329 |
-
|
| 330 |
-
# Positions
|
| 331 |
-
positions = state.get("broker_positions", {})
|
| 332 |
-
pos_count = len(positions)
|
| 333 |
-
|
| 334 |
-
# Last result
|
| 335 |
-
last_result = status.get("last_result", {})
|
| 336 |
-
last_status = last_result.get("status", "—")
|
| 337 |
-
last_trades = last_result.get("trades_executed", 0)
|
| 338 |
-
|
| 339 |
now_str = datetime.now(timezone.utc).strftime("%H:%M UTC")
|
| 340 |
-
|
| 341 |
-
f"📊 <b>Sistem Durumu</b> [{now_str}]
|
| 342 |
-
|
| 343 |
-
f"
|
| 344 |
-
f"
|
| 345 |
-
f"
|
| 346 |
-
f"
|
| 347 |
-
f"
|
| 348 |
-
f"
|
| 349 |
-
|
| 350 |
-
|
| 351 |
-
|
| 352 |
-
f"
|
| 353 |
-
|
| 354 |
-
|
| 355 |
-
)
|
|
|
|
|
|
|
| 356 |
|
| 357 |
|
| 358 |
def cmd_portfoy() -> str:
|
|
@@ -368,102 +359,96 @@ def cmd_portfoy() -> str:
|
|
| 368 |
)
|
| 369 |
|
| 370 |
lines = [f"💼 <b>Portföy</b> ({len(positions)} pozisyon)\n"]
|
| 371 |
-
|
| 372 |
-
for sym, pos in list(positions.items())[:
|
| 373 |
-
qty
|
| 374 |
cost = pos.get("avg_cost", 0)
|
| 375 |
-
|
| 376 |
-
|
| 377 |
-
|
| 378 |
-
live_px = _get_live_price(sym)
|
| 379 |
-
if live_px:
|
| 380 |
-
pnl_pct = (live_px / cost - 1) * 100 if cost else 0
|
| 381 |
-
notional = qty * live_px
|
| 382 |
-
pnl_emoji = "📈" if pnl_pct >= 0 else "📉"
|
| 383 |
-
lines.append(
|
| 384 |
-
f"• <code>{sym}</code> {qty} adet\n"
|
| 385 |
-
f" Maliyet: {cost:.2f} Güncel: {live_px:.2f}\n"
|
| 386 |
-
f" {pnl_emoji} P&L: <b>{pnl_pct:+.1f}%</b> Değer: {notional:,.0f} TL\n"
|
| 387 |
-
f" Giriş: {entry_date}"
|
| 388 |
-
)
|
| 389 |
-
else:
|
| 390 |
-
notional = qty * cost
|
| 391 |
-
lines.append(
|
| 392 |
-
f"• <code>{sym}</code> {qty} adet @ {cost:.2f} TL"
|
| 393 |
-
f" ({notional:,.0f} TL) Giriş: {entry_date}"
|
| 394 |
-
)
|
| 395 |
-
total_value += notional if live_px else qty * cost
|
| 396 |
|
| 397 |
-
if len(positions) >
|
| 398 |
-
lines.append(f"
|
| 399 |
|
| 400 |
-
equity = cash +
|
| 401 |
lines.append(f"\n💰 Nakit: <code>{cash:,.0f} TL</code>")
|
| 402 |
lines.append(f"📊 Toplam Değer: <code>{equity:,.0f} TL</code>")
|
| 403 |
return "\n".join(lines)
|
| 404 |
|
| 405 |
|
| 406 |
def cmd_hisseler() -> str:
|
| 407 |
-
|
| 408 |
-
if not
|
| 409 |
-
|
| 410 |
-
|
| 411 |
-
|
| 412 |
-
|
| 413 |
-
|
| 414 |
-
|
| 415 |
-
|
| 416 |
-
|
| 417 |
-
|
| 418 |
-
|
| 419 |
-
|
| 420 |
-
|
| 421 |
-
|
| 422 |
-
|
| 423 |
-
|
| 424 |
-
|
|
|
|
|
|
|
| 425 |
|
| 426 |
-
|
| 427 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 428 |
|
| 429 |
if not eligible:
|
| 430 |
return (
|
| 431 |
-
|
| 432 |
-
|
| 433 |
-
"
|
| 434 |
)
|
| 435 |
|
| 436 |
lines = [
|
| 437 |
-
f"📋 <b>Eligible Hisseler</b> [{
|
| 438 |
-
f"Toplam: <b>{len(eligible)}</b>
|
| 439 |
]
|
| 440 |
-
for
|
| 441 |
-
|
| 442 |
-
sharpe =
|
| 443 |
-
|
| 444 |
-
|
| 445 |
-
trades = v.get("trades", 0)
|
| 446 |
-
mdd = v.get("max_drawdown_pct", 0)
|
| 447 |
-
|
| 448 |
-
ret_emoji = "📈" if ret >= 0 else "📉"
|
| 449 |
lines.append(
|
| 450 |
-
f"
|
| 451 |
-
f" 🎯 Doğruluk: <code>{dir_acc:.1%}</code> "
|
| 452 |
-
f"📊 Sharpe: <code>{sharpe:.2f}</code>\n"
|
| 453 |
-
f" 🏹 İsabet: <code>{hit:.0f}%</code> "
|
| 454 |
-
f"🔄 İşlem: <code>{trades}</code>\n"
|
| 455 |
-
f" {ret_emoji} Getiri: <code>{ret:+.1f}%</code> "
|
| 456 |
-
f"📉 MDD: <code>{mdd:.1f}%</code>"
|
| 457 |
)
|
| 458 |
|
| 459 |
-
if
|
| 460 |
-
lines.append(f"\n
|
| 461 |
|
| 462 |
return "\n".join(lines)
|
| 463 |
|
| 464 |
|
| 465 |
def cmd_sinyaller() -> str:
|
| 466 |
-
"""Bugünkü sinyaller — signals_log tablosundan."""
|
| 467 |
today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
|
| 468 |
signals = _db_get_signals(today)
|
| 469 |
|
|
@@ -471,514 +456,400 @@ def cmd_sinyaller() -> str:
|
|
| 471 |
return (
|
| 472 |
f"📡 <b>Bugünkü Sinyaller</b> [{today}]\n\n"
|
| 473 |
"Henüz sinyal üretilmedi.\n"
|
| 474 |
-
"Sinyaller
|
| 475 |
)
|
| 476 |
|
| 477 |
-
buys
|
| 478 |
sells = [s for s in signals if s.get("signal") == "SELL"]
|
| 479 |
holds = [s for s in signals if s.get("signal") == "HOLD"]
|
| 480 |
|
| 481 |
lines = [
|
| 482 |
f"📡 <b>Sinyaller</b> [{today}]\n"
|
| 483 |
-
f"🟢 BUY:
|
| 484 |
]
|
| 485 |
|
| 486 |
if buys:
|
| 487 |
lines.append("🟢 <b>BUY Sinyalleri:</b>")
|
| 488 |
-
for s in buys[:
|
| 489 |
-
conf = float(s.get("confidence", 0))
|
| 490 |
pred = float(s.get("predicted_return", 0))
|
| 491 |
-
ml_sig = s.get("ml_signal", "?")
|
| 492 |
-
tech_sig = s.get("tech_signal", "?")
|
| 493 |
-
action = s.get("action_taken", "?")
|
| 494 |
lines.append(
|
| 495 |
f" <code>{s['symbol']:<8}</code> "
|
| 496 |
-
f"Güven:
|
| 497 |
-
f" ML: {ml_sig} Teknik: {tech_sig} Aksiyon: {action}"
|
| 498 |
)
|
| 499 |
|
| 500 |
if sells:
|
| 501 |
lines.append("\n🔴 <b>SELL Sinyalleri:</b>")
|
| 502 |
-
for s in sells[:
|
| 503 |
-
conf = float(s.get("confidence", 0))
|
| 504 |
-
|
| 505 |
-
tech_sig = s.get("tech_signal", "?")
|
| 506 |
-
lines.append(
|
| 507 |
-
f" <code>{s['symbol']:<8}</code> "
|
| 508 |
-
f"Güven: <b>{conf:.0f}%</b> ML: {ml_sig} Teknik: {tech_sig}"
|
| 509 |
-
)
|
| 510 |
|
| 511 |
return "\n".join(lines)
|
| 512 |
|
| 513 |
|
| 514 |
-
def
|
| 515 |
-
|
| 516 |
-
|
| 517 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 518 |
|
| 519 |
-
data = _get_live_indicators(sym)
|
| 520 |
-
if not data:
|
| 521 |
-
return f"❌ <code>{sym}</code> için veri alınamadı."
|
| 522 |
|
| 523 |
-
|
| 524 |
-
|
| 525 |
-
|
| 526 |
-
sig = data.get("technical_signal", "?")
|
| 527 |
-
ind = data.get("indicators", {})
|
| 528 |
-
scores = data.get("scores", {})
|
| 529 |
-
gates = data.get("gates", {})
|
| 530 |
|
| 531 |
-
|
| 532 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 533 |
|
| 534 |
-
lines = [
|
| 535 |
-
f"📊 <b>{sym} Detaylı Analiz</b>\n",
|
| 536 |
-
f"💰 Fiyat: <code>{price:.2f} TL</code> "
|
| 537 |
-
f"{change_emoji} <code>{change:+.2f}%</code>" if price and change is not None else "",
|
| 538 |
-
f"📦 Hacim: <code>{vol:,.0f}</code>" if vol else "",
|
| 539 |
-
f"{sig_emoji} Teknik Sinyal: <b>{sig}</b>\n",
|
| 540 |
-
]
|
| 541 |
|
| 542 |
-
|
| 543 |
-
|
| 544 |
-
|
| 545 |
-
|
| 546 |
-
|
| 547 |
-
|
| 548 |
-
|
| 549 |
-
|
| 550 |
-
|
| 551 |
-
|
| 552 |
-
|
| 553 |
-
|
| 554 |
-
|
| 555 |
-
|
| 556 |
-
|
| 557 |
-
|
| 558 |
-
|
| 559 |
-
lines.append(f" Bollinger: <code>{bollinger_pos:.2f}</code>")
|
| 560 |
-
if sma20:
|
| 561 |
-
lines.append(f" SMA20: <code>{sma20:.2f}</code>")
|
| 562 |
-
if sma50:
|
| 563 |
-
lines.append(f" SMA50: <code>{sma50:.2f}</code>")
|
| 564 |
-
if sma200:
|
| 565 |
-
lines.append(f" SMA200: <code>{sma200:.2f}</code>")
|
| 566 |
-
if volatility:
|
| 567 |
-
lines.append(f" Volatilite: <code>{volatility:.1f}%</code>")
|
| 568 |
-
if vol_ratio:
|
| 569 |
-
lines.append(f" Hacim Oranı (20g): <code>{vol_ratio:.2f}x</code>")
|
| 570 |
-
|
| 571 |
-
# Skorlar
|
| 572 |
-
lines.append("\n📊 <b>Skorlar (0-100):</b>")
|
| 573 |
-
score_names = {
|
| 574 |
-
"total": "Toplam", "trend": "Trend", "momentum": "Momentum",
|
| 575 |
-
"volume": "Hacim", "volatility": "Volatilite", "pattern": "Pattern",
|
| 576 |
-
}
|
| 577 |
-
for key, label in score_names.items():
|
| 578 |
-
val = scores.get(key)
|
| 579 |
-
if val is not None:
|
| 580 |
-
bar = "█" * int(val / 10) + "░" * (10 - int(val / 10))
|
| 581 |
-
lines.append(f" {label}: <code>{bar} {val:.0f}</code>")
|
| 582 |
-
|
| 583 |
-
# Kapılar
|
| 584 |
-
lines.append("\n🚦 <b>Kapılar:</b>")
|
| 585 |
-
gate_names = {
|
| 586 |
-
"trend_ok": "Trend", "momentum_ok": "Momentum",
|
| 587 |
-
"volume_ok": "Hacim", "volatility_ok": "Volatilite",
|
| 588 |
-
"pattern_ok": "Pattern", "required_ok": "GENEL",
|
| 589 |
-
}
|
| 590 |
-
for key, label in gate_names.items():
|
| 591 |
-
val = gates.get(key)
|
| 592 |
-
if val is not None:
|
| 593 |
-
icon = "✅" if val else "❌"
|
| 594 |
-
lines.append(f" {icon} {label}")
|
| 595 |
-
|
| 596 |
-
# Scan stage2 info if available
|
| 597 |
-
scan = _read_json(_SCAN_FILE)
|
| 598 |
-
stage2 = scan.get("stage2", {}).get(sym, {})
|
| 599 |
-
if stage2.get("eligible"):
|
| 600 |
-
lines.append(f"\n✅ <b>Stage2 Eligible</b>")
|
| 601 |
-
lines.append(f" Doğruluk: {stage2.get('dir_acc', 0):.1%}")
|
| 602 |
-
lines.append(f" Sharpe: {stage2.get('sharpe', 0):.2f}")
|
| 603 |
-
lines.append(f" Getiri: {stage2.get('total_return_pct', 0):+.1f}%")
|
| 604 |
-
|
| 605 |
-
return "\n".join(l for l in lines if l)
|
| 606 |
-
|
| 607 |
-
|
| 608 |
-
def cmd_islemler() -> str:
|
| 609 |
-
"""Son işlemler."""
|
| 610 |
-
trades = _db_get_recent_trades(15)
|
| 611 |
-
if not trades:
|
| 612 |
-
return "📒 <b>Son İşlemler</b>\n\nHenüz işlem gerçekleşmedi."
|
| 613 |
-
|
| 614 |
-
lines = [f"📒 <b>Son İşlemler</b> ({len(trades)} kayıt)\n"]
|
| 615 |
-
for t in trades:
|
| 616 |
-
side = t.get("side", "?")
|
| 617 |
-
side_emoji = "🟢" if side == "BUY" else "🔴"
|
| 618 |
-
sym = t.get("symbol", "?")
|
| 619 |
-
qty = t.get("quantity", 0)
|
| 620 |
-
entry_px = t.get("entry_price", 0)
|
| 621 |
-
exit_px = t.get("exit_price")
|
| 622 |
-
pnl = t.get("pnl")
|
| 623 |
-
pnl_pct = t.get("pnl_pct")
|
| 624 |
-
status_t = t.get("status", "?")
|
| 625 |
-
entry_date = str(t.get("entry_date", ""))[:10]
|
| 626 |
-
exit_date = str(t.get("exit_date", ""))[:10] if t.get("exit_date") else ""
|
| 627 |
-
reason = t.get("exit_reason", "")
|
| 628 |
-
|
| 629 |
-
line = f"{side_emoji} <code>{sym}</code> {qty} adet @ {entry_px:.2f}"
|
| 630 |
-
if exit_px and pnl is not None:
|
| 631 |
-
pnl_emoji = "📈" if pnl >= 0 else "📉"
|
| 632 |
-
line += (
|
| 633 |
-
f"\n Çıkış: {exit_px:.2f} {pnl_emoji} P&L: <b>{pnl:+.2f} TL</b>"
|
| 634 |
-
f" ({pnl_pct:+.1f}%)" if pnl_pct else ""
|
| 635 |
-
)
|
| 636 |
-
if reason:
|
| 637 |
-
line += f"\n Sebep: {reason}"
|
| 638 |
-
line += f"\n {entry_date} → {exit_date}"
|
| 639 |
-
else:
|
| 640 |
-
line += f"\n Durum: {status_t} Tarih: {entry_date}"
|
| 641 |
-
lines.append(line)
|
| 642 |
|
| 643 |
-
return "\n".join(lines)
|
| 644 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 645 |
|
| 646 |
-
def cmd_tarama() -> str:
|
| 647 |
-
"""Tarama durumu ve detayları."""
|
| 648 |
-
if not _SCAN_FILE.exists():
|
| 649 |
-
return "🔍 <b>Tarama Durumu</b>\n\nTarama dosyası bulunamadı."
|
| 650 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 651 |
try:
|
| 652 |
-
|
| 653 |
-
|
| 654 |
-
return
|
| 655 |
-
|
| 656 |
-
|
| 657 |
-
|
| 658 |
-
|
| 659 |
-
|
| 660 |
-
|
| 661 |
-
|
| 662 |
-
if
|
| 663 |
-
|
| 664 |
-
|
| 665 |
-
|
| 666 |
-
|
| 667 |
-
|
| 668 |
-
|
| 669 |
-
|
| 670 |
-
|
| 671 |
-
|
| 672 |
-
|
| 673 |
-
# Stage2 stats
|
| 674 |
-
s2_total = len(stage2)
|
| 675 |
-
s2_eligible = sum(1 for v in stage2.values() if v.get("eligible"))
|
| 676 |
-
s2_filtered = sum(1 for v in stage2.values() if not v.get("eligible"))
|
| 677 |
|
|
|
|
| 678 |
lines = [
|
| 679 |
-
f"
|
| 680 |
-
|
| 681 |
-
f"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 682 |
]
|
| 683 |
-
|
| 684 |
-
lines.append(f"Bitiş: <code>{finished}</code>")
|
| 685 |
|
| 686 |
-
lines.append(f"\n📊 <b>Stage 1</b> (Filtre):")
|
| 687 |
-
lines.append(f" Toplam: {s1_total} ✅ Geçen: {s1_pass} ❌ Elenen: {s1_fail}")
|
| 688 |
|
| 689 |
-
|
| 690 |
-
|
| 691 |
-
|
|
|
|
| 692 |
|
| 693 |
-
if
|
| 694 |
-
|
| 695 |
-
|
| 696 |
-
|
| 697 |
-
|
| 698 |
)
|
| 699 |
-
for sym, v in eligible_list[:10]:
|
| 700 |
-
lines.append(
|
| 701 |
-
f" <code>{sym}</code> — "
|
| 702 |
-
f"Doğruluk: {v.get('dir_acc', 0):.0%} "
|
| 703 |
-
f"Sharpe: {v.get('sharpe', 0):.2f} "
|
| 704 |
-
f"Getiri: {v.get('total_return_pct', 0):+.1f}%"
|
| 705 |
-
)
|
| 706 |
|
| 707 |
-
|
| 708 |
-
|
| 709 |
-
|
| 710 |
-
|
| 711 |
-
|
| 712 |
-
|
| 713 |
-
|
| 714 |
-
"/
|
| 715 |
-
"/hisseler — Eligible hisse listesi\n"
|
| 716 |
-
"/sinyaller — Bugünkü BUY/SELL sinyalleri\n"
|
| 717 |
-
"/hisse THYAO — Tek hisse detaylı analiz\n"
|
| 718 |
-
"/islemler — Son alım/satım işlemleri\n"
|
| 719 |
-
"/tarama — Tarama durumu ve ilerlemesi\n"
|
| 720 |
-
"/yardim — Bu mesaj\n\n"
|
| 721 |
-
"📌 <i>Bot otomatik bildirim gönderir:</i>\n"
|
| 722 |
-
" • Tarama başladığında/bittiğinde\n"
|
| 723 |
-
" • Sinyal üretildiğinde\n"
|
| 724 |
-
" • Alım/satım yapıldığında\n"
|
| 725 |
-
" • Hata oluştuğunda"
|
| 726 |
-
)
|
| 727 |
|
|
|
|
|
|
|
| 728 |
|
| 729 |
-
|
| 730 |
-
|
| 731 |
-
|
|
|
|
| 732 |
|
| 733 |
-
def notify_scan_started() -> None:
|
| 734 |
-
"""Tarama başladığında bildirim."""
|
| 735 |
-
now = datetime.now(timezone.utc).strftime("%H:%M UTC")
|
| 736 |
-
send(f"🔍 <b>BIST Tarama başladı</b> [{now}]\n\nStage1 + Stage2 çalışıyor...")
|
| 737 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 738 |
|
| 739 |
-
|
| 740 |
-
|
| 741 |
-
stage2 = results.get("stage2", {})
|
| 742 |
-
eligible = [(s, v) for s, v in stage2.items() if v.get("eligible")]
|
| 743 |
-
total = len(stage2)
|
| 744 |
-
now = datetime.now(timezone.utc).strftime("%H:%M UTC")
|
| 745 |
|
| 746 |
lines = [
|
| 747 |
-
f"
|
| 748 |
-
f"
|
| 749 |
-
f"Eligible: <b>{len(eligible)}</b> hisse\n",
|
| 750 |
]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 751 |
|
| 752 |
-
if eligible:
|
| 753 |
-
eligible.sort(key=lambda x: x[1].get("dir_acc", 0), reverse=True)
|
| 754 |
-
for sym, v in eligible[:10]:
|
| 755 |
-
lines.append(
|
| 756 |
-
f" ✅ <code>{sym}</code> — "
|
| 757 |
-
f"Doğruluk: {v.get('dir_acc', 0):.0%} "
|
| 758 |
-
f"Sharpe: {v.get('sharpe', 0):.2f} "
|
| 759 |
-
f"Getiri: {v.get('total_return_pct', 0):+.1f}%"
|
| 760 |
-
)
|
| 761 |
-
else:
|
| 762 |
-
lines.append(" ⚠️ Uygun hisse bulunamadı.")
|
| 763 |
|
| 764 |
-
|
|
|
|
|
|
|
| 765 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 766 |
|
| 767 |
-
|
| 768 |
-
|
| 769 |
-
|
| 770 |
-
hold_count: int,
|
| 771 |
-
details: List[Dict[str, Any]],
|
| 772 |
-
) -> None:
|
| 773 |
-
"""Sinyal üretimi tamamlandığında bildirim."""
|
| 774 |
-
now = datetime.now(timezone.utc).strftime("%H:%M UTC")
|
| 775 |
-
total = buy_count + sell_count + hold_count
|
| 776 |
|
| 777 |
lines = [
|
| 778 |
-
f"
|
| 779 |
-
f"
|
| 780 |
-
f"🟢 BUY: {buy_count} 🔴 SELL: {sell_count} ⚪ HOLD: {hold_count}\n",
|
| 781 |
]
|
| 782 |
|
| 783 |
-
buys = [d for d in details if d.get("signal") == "BUY"]
|
| 784 |
if buys:
|
| 785 |
-
lines.append("🟢 <b>
|
| 786 |
-
for
|
| 787 |
-
|
| 788 |
-
|
| 789 |
-
|
| 790 |
-
f"ML: {b.get('ml_signal', '?')} "
|
| 791 |
-
f"Teknik: {b.get('tech_signal', '?')}"
|
| 792 |
-
)
|
| 793 |
-
|
| 794 |
-
send("\n".join(lines))
|
| 795 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 796 |
|
| 797 |
-
|
| 798 |
-
"""Alım veya satım gerçekleştiğinde bildirim."""
|
| 799 |
-
side = trade.get("side", "?")
|
| 800 |
-
sym = trade.get("symbol", "?")
|
| 801 |
-
qty = trade.get("qty", 0)
|
| 802 |
-
price = trade.get("price", 0)
|
| 803 |
-
pnl = trade.get("pnl")
|
| 804 |
-
reason = trade.get("reason", "")
|
| 805 |
-
confidence = trade.get("confidence")
|
| 806 |
|
| 807 |
-
now = datetime.now(timezone.utc).strftime("%H:%M UTC")
|
| 808 |
|
| 809 |
-
|
| 810 |
-
|
| 811 |
-
|
| 812 |
-
f"Hisse: <b>{sym}</b>\n"
|
| 813 |
-
f"Adet: <code>{qty}</code>\n"
|
| 814 |
-
f"Fiyat: <code>{price:.2f} TL</code>\n"
|
| 815 |
-
f"Tutar: <code>{qty * price:,.0f} TL</code>"
|
| 816 |
-
)
|
| 817 |
-
if confidence:
|
| 818 |
-
text += f"\nGüven: <code>{confidence:.0f}%</code>"
|
| 819 |
-
else:
|
| 820 |
-
text = (
|
| 821 |
-
f"🔴 <b>SATIM GERÇEKLEŞTİ</b> [{now}]\n\n"
|
| 822 |
-
f"Hisse: <b>{sym}</b>\n"
|
| 823 |
-
f"Adet: <code>{qty}</code>\n"
|
| 824 |
-
f"Fiyat: <code>{price:.2f} TL</code>"
|
| 825 |
-
)
|
| 826 |
-
if pnl is not None:
|
| 827 |
-
pnl_emoji = "📈" if pnl >= 0 else "📉"
|
| 828 |
-
text += f"\n{pnl_emoji} P&L: <b>{pnl:+.2f} TL</b>"
|
| 829 |
-
if reason:
|
| 830 |
-
reason_tr = {
|
| 831 |
-
"stop_loss": "🛑 Stop Loss",
|
| 832 |
-
"take_profit": "🎯 Take Profit",
|
| 833 |
-
"ml_sell_signal": "🤖 ML Satış Sinyali",
|
| 834 |
-
"holding_period_expiry": "⏰ Süre Dolumu",
|
| 835 |
-
}.get(reason, reason)
|
| 836 |
-
text += f"\nSebep: {reason_tr}"
|
| 837 |
-
|
| 838 |
-
send(text)
|
| 839 |
-
|
| 840 |
-
|
| 841 |
-
def notify_cycle_complete(result: Dict[str, Any]) -> None:
|
| 842 |
-
"""İşlem döngüsü tamamlandığında bildirim."""
|
| 843 |
-
trades = result.get("trades_executed", 0)
|
| 844 |
-
pnl = result.get("realized_pnl_today", 0)
|
| 845 |
-
portfolio = result.get("portfolio", {})
|
| 846 |
-
equity = portfolio.get("equity", 0)
|
| 847 |
-
cash = portfolio.get("cash", 0)
|
| 848 |
-
status = result.get("status", "?")
|
| 849 |
-
mode = result.get("mode", "paper")
|
| 850 |
-
elapsed = result.get("elapsed_sec", 0)
|
| 851 |
-
model_risk = result.get("model_risk", {})
|
| 852 |
-
|
| 853 |
-
pnl_emoji = "📈" if pnl >= 0 else "📉"
|
| 854 |
-
now = datetime.now(timezone.utc).strftime("%H:%M UTC")
|
| 855 |
-
|
| 856 |
-
text = (
|
| 857 |
-
f"📊 <b>İşlem Döngüsü Tamamlandı</b> [{now}]\n\n"
|
| 858 |
-
f"Durum: <code>{status}</code>\n"
|
| 859 |
-
f"Mod: <code>{mode}</code>\n"
|
| 860 |
-
f"Süre: <code>{elapsed:.0f}s</code>\n\n"
|
| 861 |
-
f"🔄 İşlem Sayısı: <b>{trades}</b>\n"
|
| 862 |
-
f"{pnl_emoji} Günlük P&L: <b>{pnl:+,.2f} TL</b>\n"
|
| 863 |
-
f"💰 Nakit: <code>{cash:,.0f} TL</code>\n"
|
| 864 |
-
f"📊 Toplam Değer: <code>{equity:,.0f} TL</code>"
|
| 865 |
-
)
|
| 866 |
|
| 867 |
-
|
| 868 |
-
|
| 869 |
-
|
| 870 |
-
|
| 871 |
-
|
| 872 |
-
details = result.get("trade_details", [])
|
| 873 |
-
if details:
|
| 874 |
-
text += "\n\n<b>İşlem Detayları:</b>"
|
| 875 |
-
for t in details[:10]:
|
| 876 |
-
side_emoji = "🟢" if t.get("side") == "BUY" else "🔴"
|
| 877 |
-
pnl_str = f" P&L: {t['pnl']:+.2f}" if "pnl" in t else ""
|
| 878 |
-
text += f"\n{side_emoji} {t['symbol']} {t['qty']}x @ {t['price']}{pnl_str}"
|
| 879 |
-
|
| 880 |
-
send(text)
|
| 881 |
-
|
| 882 |
-
|
| 883 |
-
def notify_error(title: str, error_msg: str) -> None:
|
| 884 |
-
"""Hata oluştuğunda bildirim."""
|
| 885 |
-
now = datetime.now(timezone.utc).strftime("%H:%M UTC")
|
| 886 |
-
send(
|
| 887 |
-
f"🚨 <b>HATA</b> [{now}]\n\n"
|
| 888 |
-
f"<b>{title}</b>\n"
|
| 889 |
-
f"<code>{error_msg[:500]}</code>"
|
| 890 |
-
)
|
| 891 |
|
|
|
|
|
|
|
|
|
|
| 892 |
|
| 893 |
-
|
| 894 |
-
|
| 895 |
-
|
| 896 |
-
|
| 897 |
-
|
| 898 |
-
f"ML modeli güvensiz — sadece teknik sinyallerle çalışılıyor.\n"
|
| 899 |
-
f"Sebep: <code>{reason[:200]}</code>"
|
| 900 |
-
)
|
| 901 |
|
|
|
|
| 902 |
|
| 903 |
-
|
| 904 |
-
|
| 905 |
-
|
| 906 |
-
|
| 907 |
-
|
| 908 |
-
f"Tarama tamamlandı ama kriterleri geçen hisse bulunamadı."
|
| 909 |
-
)
|
| 910 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 911 |
|
| 912 |
-
|
| 913 |
-
"""Sinyal üretilemediğinde bildirim."""
|
| 914 |
-
now = datetime.now(timezone.utc).strftime("%H:%M UTC")
|
| 915 |
-
send(
|
| 916 |
-
f"⚠️ <b>Sinyal Üretilemedi</b> [{now}]\n\n"
|
| 917 |
-
f"Eligible hisseler için sinyal üretilemedi."
|
| 918 |
-
)
|
| 919 |
|
|
|
|
|
|
|
| 920 |
|
| 921 |
-
|
| 922 |
-
# Komut dağıtıcı
|
| 923 |
-
# ═══════════════════════════════════════════════════════════════════════════
|
| 924 |
|
| 925 |
-
SIMPLE_COMMANDS = {
|
| 926 |
-
"/durum": cmd_durum,
|
| 927 |
-
"/portfoy": cmd_portfoy,
|
| 928 |
-
"/hisseler": cmd_hisseler,
|
| 929 |
-
"/sinyaller": cmd_sinyaller,
|
| 930 |
-
"/islemler": cmd_islemler,
|
| 931 |
-
"/tarama": cmd_tarama,
|
| 932 |
-
"/yardim": cmd_yardim,
|
| 933 |
-
"/help": cmd_yardim,
|
| 934 |
-
"/start": cmd_yardim,
|
| 935 |
-
}
|
| 936 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 937 |
|
| 938 |
-
|
| 939 |
-
|
| 940 |
-
|
| 941 |
-
cmd = parts[0].lower()
|
| 942 |
|
| 943 |
-
|
| 944 |
-
if cmd == "/hisse":
|
| 945 |
-
if len(parts) < 2:
|
| 946 |
-
send("Kullanım: /hisse THYAO\nÖrnek: /hisse ASELS")
|
| 947 |
-
return
|
| 948 |
try:
|
| 949 |
-
|
| 950 |
-
|
| 951 |
-
|
| 952 |
-
|
| 953 |
-
|
| 954 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 955 |
|
| 956 |
-
handler = SIMPLE_COMMANDS.get(cmd)
|
| 957 |
-
if handler:
|
| 958 |
-
try:
|
| 959 |
-
reply = handler()
|
| 960 |
-
send(reply)
|
| 961 |
except Exception as e:
|
| 962 |
-
logger.
|
| 963 |
-
|
| 964 |
-
|
| 965 |
-
send(
|
| 966 |
-
f"❓ Bilinmeyen komut: <code>{cmd}</code>\n"
|
| 967 |
-
"/yardim — komut listesi"
|
| 968 |
-
)
|
| 969 |
|
| 970 |
|
| 971 |
-
#
|
| 972 |
-
#
|
| 973 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 974 |
|
| 975 |
def main() -> None:
|
|
|
|
|
|
|
| 976 |
logger.info("Telegram bot başlatıldı (chat_id=%s)", CHAT_ID)
|
| 977 |
-
send(
|
| 978 |
-
|
| 979 |
-
|
| 980 |
-
|
| 981 |
-
)
|
|
|
|
|
|
|
|
|
|
|
|
|
| 982 |
|
| 983 |
offset = 0
|
| 984 |
while True:
|
|
@@ -991,12 +862,24 @@ def main() -> None:
|
|
| 991 |
if str(msg.get("chat", {}).get("id", "")) != str(CHAT_ID):
|
| 992 |
continue
|
| 993 |
|
| 994 |
-
text = msg.get("text", "").strip()
|
| 995 |
if not text:
|
| 996 |
continue
|
| 997 |
|
| 998 |
logger.info("Komut: %s", text)
|
| 999 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1000 |
|
| 1001 |
if not updates:
|
| 1002 |
time.sleep(1)
|
|
|
|
| 1 |
+
"""Bağımsız Telegram Bot — Worker çalışmasa da yanıt verir.
|
| 2 |
|
| 3 |
+
Dosya ve DB'den okuyarak komutlara yanıt verir.
|
|
|
|
| 4 |
|
| 5 |
Kullanım:
|
| 6 |
python telegram_bot.py # Foreground (test)
|
| 7 |
python telegram_bot.py & # Arka planda
|
| 8 |
|
| 9 |
+
Desteklenen komutlar:
|
| 10 |
+
/durum — Sistem durumu
|
| 11 |
+
/portfoy — Portföy özeti
|
| 12 |
+
/hisseler — Eligible hisse listesi
|
| 13 |
/sinyaller — Bugünkü BUY/SELL sinyalleri
|
|
|
|
|
|
|
|
|
|
| 14 |
/yardim — Komut listesi
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 15 |
"""
|
| 16 |
from __future__ import annotations
|
| 17 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 18 |
import json
|
| 19 |
import logging
|
| 20 |
import os
|
| 21 |
+
import socket as _socket
|
| 22 |
import sys
|
| 23 |
import time
|
| 24 |
+
import urllib.parse
|
| 25 |
import urllib.request
|
| 26 |
import urllib.error
|
| 27 |
from datetime import datetime, timezone
|
| 28 |
from pathlib import Path
|
|
|
|
| 29 |
|
| 30 |
logging.basicConfig(
|
| 31 |
level=logging.INFO,
|
|
|
|
| 37 |
CHAT_ID = os.environ.get("TELEGRAM_CHAT_ID", "")
|
| 38 |
|
| 39 |
if not BOT_TOKEN or not CHAT_ID:
|
| 40 |
+
# .env dosyasından oku
|
| 41 |
env_file = Path(__file__).parent / ".env"
|
| 42 |
if env_file.exists():
|
| 43 |
for line in env_file.read_text().splitlines():
|
|
|
|
| 51 |
print("HATA: TELEGRAM_BOT_TOKEN bulunamadı. .env dosyasını kontrol edin.")
|
| 52 |
sys.exit(1)
|
| 53 |
|
| 54 |
+
# DNS resolution for api.telegram.org can be unstable inside HF Spaces.
# Use a fixed-IP fallback for direct Telegram calls.
_orig_getaddrinfo = _socket.getaddrinfo


def _tg_dns_fix(host, port, family=0, type=0, proto=0, flags=0):
    """getaddrinfo wrapper pinning api.telegram.org to a known Telegram IP.

    The signature mirrors socket.getaddrinfo so the function can be
    installed process-wide as a drop-in replacement.
    NOTE(review): 149.154.167.220 is assumed to be a stable Telegram
    datacenter address — confirm if Telegram rotates its IPs.
    """
    if host == "api.telegram.org":
        return _orig_getaddrinfo("149.154.167.220", port, family, type, proto, flags)
    return _orig_getaddrinfo(host, port, family, type, proto, flags)


# Install the wrapper process-wide (affects every socket/urllib lookup).
_socket.getaddrinfo = _tg_dns_fix
|
| 66 |
+
|
| 67 |
+
# HF Spaces blocks direct access to api.telegram.org.
# If TELEGRAM_PROXY_URL is set, try the proxy first; a direct-API fallback exists.
_PROXY_URL = os.environ.get(
    "TELEGRAM_PROXY_URL",
    "https://borsanova.netlify.app/api/tgproxy",
)
if _PROXY_URL:
    _PROXY_URL = _PROXY_URL.strip()
    # Treat common "disabled" sentinels as no proxy at all.
    if _PROXY_URL in {"", "none", "null"}:
        _PROXY_URL = ""
    # A known-dead legacy proxy host must never be used, even if configured.
    if "telegram-proxy-vercel-neon.vercel.app" in (_PROXY_URL or ""):
        logger.warning("Ignoring legacy dead TELEGRAM_PROXY_URL override: %s", _PROXY_URL)
        _PROXY_URL = ""

# Candidate API bases, probed in order by _resolve_api_base(): proxy (if any),
# then the direct Telegram endpoint.
_DIRECT_API_BASE = f"https://api.telegram.org/bot{BOT_TOKEN}"
_API_BASE_CANDIDATES = []
if _PROXY_URL:
    _API_BASE_CANDIDATES.append(f"{_PROXY_URL.rstrip('/')}/bot{BOT_TOKEN}")
_API_BASE_CANDIDATES.append(_DIRECT_API_BASE)
# Resolved lazily; empty string means "not probed yet".
API_BASE = ""
# Opener that bypasses any system/env HTTP proxy settings.
_NOPROXY_OPENER = urllib.request.build_opener(
    urllib.request.ProxyHandler({}),
    urllib.request.HTTPSHandler(),
)
|
| 91 |
|
| 92 |
+
# The paper_trading directory may live one level above huggingface-space.
_SCRIPT_DIR = Path(__file__).parent
_PAPER_DIR = (
    _SCRIPT_DIR / "paper_trading"
    if (_SCRIPT_DIR / "paper_trading").exists()
    else _SCRIPT_DIR.parent / "paper_trading"
)

# BIST market data files (status/state written by the worker, scan by the scanner).
_STATUS_FILE = _PAPER_DIR / "auto_trader" / "status.json"
_STATE_FILE = _PAPER_DIR / "auto_trader" / "state.json"
_SCAN_FILE = _PAPER_DIR / "bist100_scan_results.json"
_DB_PATH = _PAPER_DIR / "trading.db"

# US market paths — same layout, separate directory tree.
_US_DIR = (
    _SCRIPT_DIR / "paper_trading_us"
    if (_SCRIPT_DIR / "paper_trading_us").exists()
    else _SCRIPT_DIR.parent / "paper_trading_us"
)
_US_STATUS_FILE = _US_DIR / "auto_trader" / "status.json"
_US_STATE_FILE = _US_DIR / "auto_trader" / "state.json"
_US_SCAN_FILE = _US_DIR / "us_scan_results.json"
_US_DB_PATH = _US_DIR / "trading.db"
|
| 116 |
|
| 117 |
+
# ---------------------------------------------------------------------------
|
| 118 |
# Telegram API helpers
|
| 119 |
+
# ---------------------------------------------------------------------------
|
| 120 |
|
| 121 |
+
def _probe_api_base(base: str) -> bool:
    """Return True when ``{base}/getMe`` answers with an ok:true payload."""
    probe_url = f"{base}/getMe"
    try:
        with _NOPROXY_OPENER.open(urllib.request.Request(probe_url), timeout=10) as resp:
            body = json.loads(resp.read().decode())
    except Exception as exc:
        logger.warning("Telegram API base probe failed: %s (%s)", base, exc)
        return False
    return bool(body.get("ok"))
|
| 130 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 131 |
|
| 132 |
+
def _resolve_api_base(force: bool = False) -> str:
    """Pick (and cache in the global ``API_BASE``) the first reachable base URL.

    ``force=True`` re-probes even when a base is already cached.
    Raises RuntimeError when no candidate responds.
    """
    global API_BASE
    if API_BASE and not force:
        return API_BASE

    for candidate in _API_BASE_CANDIDATES:
        if not _probe_api_base(candidate):
            continue
        API_BASE = candidate
        logger.info("Telegram API base selected: %s", candidate)
        return API_BASE

    raise RuntimeError("No reachable Telegram API base found")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 144 |
|
| 145 |
+
def _api(method: str, payload: dict) -> dict:
    """POST *payload* to the Telegram ``method`` endpoint, returning the JSON reply.

    On any failure the API base is re-resolved once (the proxy may have died)
    and the call retried; a second failure propagates to the caller.

    Fix: the retry previously rebuilt the request with duplicated code and
    silently discarded the first exception — the request construction is now
    shared and the first failure is logged.
    """
    data = json.dumps(payload).encode()
    # getUpdates long-polls for up to 20s → needs a larger socket timeout.
    sock_timeout = 30 if "getUpdates" in method else 15

    def _post(base: str) -> dict:
        # Single place to build and send the request (was duplicated for retry).
        req = urllib.request.Request(
            f"{base}/{method}",
            data=data,
            headers={"Content-Type": "application/json"},
        )
        with _NOPROXY_OPENER.open(req, timeout=sock_timeout) as resp:
            return json.loads(resp.read().decode())

    try:
        return _post(_resolve_api_base())
    except Exception as e:
        logger.warning("Telegram %s failed (%s); re-resolving API base", method, e)
        return _post(_resolve_api_base(force=True))
|
| 167 |
|
| 168 |
|
| 169 |
+
def send(text: str, parse_mode: str = "HTML") -> None:
    """Deliver *text* to the configured chat; failures are logged, never raised."""
    message = {
        "chat_id": CHAT_ID,
        "text": text,
        "parse_mode": parse_mode,
    }
    try:
        _api("sendMessage", message)
    except Exception as exc:
        logger.error("Send failed: %s", exc)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 178 |
|
| 179 |
|
| 180 |
def get_updates(offset: int) -> list:
|
|
|
|
| 186 |
})
|
| 187 |
return result.get("result", [])
|
| 188 |
except urllib.error.URLError as e:
|
| 189 |
+
logger.warning("getUpdates network error: %s", e)
|
| 190 |
return []
|
| 191 |
except Exception as e:
|
| 192 |
+
logger.warning("getUpdates error: %s", e)
|
| 193 |
return []
|
| 194 |
|
| 195 |
|
| 196 |
+
# ---------------------------------------------------------------------------
|
| 197 |
+
# Data readers
|
| 198 |
+
# ---------------------------------------------------------------------------
|
| 199 |
|
| 200 |
def _read_json(path: Path) -> dict:
|
| 201 |
try:
|
|
|
|
| 206 |
return {}
|
| 207 |
|
| 208 |
|
| 209 |
+
def _db_get_signals(date: str, limit: int = 50) -> list:
    """Return signal rows for *date* (highest confidence first) from the BIST db.

    Best-effort: any error (missing db, missing table) yields an empty list.
    Fix: the sqlite connection previously leaked when execute() raised —
    it is now closed in a ``finally`` block.
    """
    try:
        import sqlite3
        if not _DB_PATH.exists():
            return []
        conn = sqlite3.connect(str(_DB_PATH))
        try:
            conn.row_factory = sqlite3.Row
            rows = conn.execute(
                "SELECT * FROM signals_log WHERE date = ? ORDER BY confidence DESC LIMIT ?",
                (date, limit),
            ).fetchall()
            return [dict(r) for r in rows]
        finally:
            conn.close()
    except Exception:
        return []
|
| 224 |
|
| 225 |
|
| 226 |
+
def _db_state_map() -> dict:
    """Read the key/value ``state`` table from the BIST db, JSON-decoding values.

    Values that are not valid JSON are kept as raw strings.
    Best-effort: any error yields an empty dict.
    Fix: the sqlite connection previously leaked when execute() raised —
    it is now closed in a ``finally`` block.
    """
    try:
        import sqlite3
        if not _DB_PATH.exists():
            return {}
        conn = sqlite3.connect(str(_DB_PATH))
        try:
            rows = conn.execute("SELECT key, value FROM state").fetchall()
        finally:
            conn.close()
        parsed = {}
        for key, value in rows:
            try:
                parsed[key] = json.loads(value)
            except Exception:
                parsed[key] = value
        return parsed
    except Exception:
        return {}
|
| 243 |
|
| 244 |
|
| 245 |
+
def _api_get_json(url: str) -> dict:
|
| 246 |
+
try:
|
| 247 |
+
req = urllib.request.Request(url, headers={"Accept": "application/json"})
|
| 248 |
+
with urllib.request.urlopen(req, timeout=15) as resp:
|
| 249 |
+
return json.loads(resp.read().decode())
|
| 250 |
+
except Exception:
|
| 251 |
+
return {}
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
def _load_best_scan_payload() -> dict:
    """Load the freshest readable BIST scan file (work-in-progress or final).

    Each readable candidate is tagged with its source path under ``_path``;
    the payload with the newest timestamp wins. Empty dict when none load.
    """
    loaded = []
    for path in [_PAPER_DIR / "bist100_scan_results_work.json", _PAPER_DIR / "bist100_scan_results.json"]:
        if not path.exists():
            continue
        try:
            payload = json.loads(path.read_text())
        except Exception:
            continue
        payload["_path"] = str(path)
        loaded.append(payload)

    if not loaded:
        return {}

    def _ts(item: dict) -> str:
        # Prefer the most specific timestamp field available.
        return str(item.get("updated_at") or item.get("scan_finished") or item.get("scan_started") or "")

    # max() returns the first maximal element — same result as sorting
    # descending and taking index 0.
    return max(loaded, key=_ts)
|
| 273 |
|
| 274 |
|
| 275 |
def _db_get_portfolio() -> dict:
    """Portfolio snapshot: prefer state.json, fall back to the sqlite state table."""
    state = _read_json(_STATE_FILE) or _db_state_map()
    return {
        "cash": state.get("broker_cash", 100_000),
        "positions": state.get("broker_positions", {}),
    }
|
| 284 |
|
| 285 |
|
| 286 |
def _is_worker_running() -> bool:
    """True when the PID recorded in paper_trading/auto_trader/worker.pid is alive.

    ``os.kill(pid, 0)`` is used as an existence probe (signal 0 delivers nothing).
    Fix: use the module-level ``os`` (consistent with _us_is_worker_running)
    instead of a redundant local ``import os as _os``.
    """
    pid_file = _PAPER_DIR / "auto_trader" / "worker.pid"
    if not pid_file.exists():
        return False
    try:
        pid = int(pid_file.read_text().strip())
        os.kill(pid, 0)
        return True
    except Exception:
        # Unparsable PID file or dead process — both mean "not running".
        return False
|
| 297 |
|
| 298 |
|
| 299 |
+
# ---------------------------------------------------------------------------
|
| 300 |
+
# Command handlers
|
| 301 |
+
# ---------------------------------------------------------------------------
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 302 |
|
| 303 |
def cmd_durum() -> str:
    """Build the /durum (system status) reply for the BIST worker.

    Reads status.json + state.json (sqlite state table as fallback) and the
    kill-switch file, then formats an HTML summary for Telegram.
    """
    status = _read_json(_STATUS_FILE)
    state = _read_json(_STATE_FILE)
    if not state:
        state = _db_state_map()

    worker_running = _is_worker_running()
    is_running = status.get("is_running", False)
    # Mode may live in several places depending on worker version; default "bist".
    mode = status.get("mode") or state.get("mode") or status.get("market_id") or "bist"
    last_run = state.get("last_run_date", "—")
    total_trades = state.get("total_trades", 0)
    total_days = state.get("total_days_run", 0)
    phase = status.get("current_phase") or status.get("status", "—")
    cash = state.get("broker_cash", 100_000)

    # Kill switch: green check unless kill_switch.json exists and is active.
    ks_icon = "✅"
    ks_file = _PAPER_DIR / "kill_switch.json"
    if ks_file.exists():
        try:
            ks = json.loads(ks_file.read_text())
            if ks.get("active"):
                ks_icon = f"🔴 AKTİF — {ks.get('reason','?')[:50]}"
        except Exception:
            pass

    now_str = datetime.now(timezone.utc).strftime("%H:%M UTC")
    lines = [
        f"📊 <b>Sistem Durumu</b> [{now_str}]",
        "",
        f"Worker: {'🟢 Çalışıyor' if worker_running else '🔴 Durdu'}",
        f"Aktif Döngü: {'✅ Evet' if is_running else '—'}",
        f"Mod: <code>{mode}</code>",
        f"Kill Switch: {ks_icon}",
        f"Son Çalışma: <code>{last_run}</code>",
        f"Faz: <code>{phase}</code>",
        "",
    ]
    # broker_cash may be a non-numeric placeholder in older state files.
    if isinstance(cash, (int, float)):
        lines.append(f"💰 Nakit: <code>{cash:,.0f} TL</code>")
    else:
        lines.append("💰 Nakit: <code>bilinmiyor</code>")
    lines.append(f"📈 Toplam İşlem: <code>{total_trades}</code>")
    lines.append(f"📅 Çalışılan Gün: <code>{total_days}</code>")
    return "\n".join(lines)
|
| 347 |
|
| 348 |
|
| 349 |
def cmd_portfoy() -> str:
|
|
|
|
| 359 |
)
|
| 360 |
|
| 361 |
lines = [f"💼 <b>Portföy</b> ({len(positions)} pozisyon)\n"]
|
| 362 |
+
total_notional = 0.0
|
| 363 |
+
for sym, pos in list(positions.items())[:15]:
|
| 364 |
+
qty = pos.get("qty", 0)
|
| 365 |
cost = pos.get("avg_cost", 0)
|
| 366 |
+
notional = qty * cost
|
| 367 |
+
total_notional += notional
|
| 368 |
+
lines.append(f"• <code>{sym}</code> {qty} adet @ {cost:.2f} TL ({notional:,.0f} TL)")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 369 |
|
| 370 |
+
if len(positions) > 15:
|
| 371 |
+
lines.append(f" ... ve {len(positions) - 15} pozisyon daha")
|
| 372 |
|
| 373 |
+
equity = cash + total_notional
|
| 374 |
lines.append(f"\n💰 Nakit: <code>{cash:,.0f} TL</code>")
|
| 375 |
lines.append(f"📊 Toplam Değer: <code>{equity:,.0f} TL</code>")
|
| 376 |
return "\n".join(lines)
|
| 377 |
|
| 378 |
|
| 379 |
def cmd_hisseler() -> str:
    """Build the /hisseler reply: eligible BIST stocks.

    Primary source is the local eligible API; when it is down, falls back to
    reading the freshest scan file directly.
    """
    data = _api_get_json("http://localhost:7860/api/eligible?market=bist")
    if not data:
        # --- Fallback path: scan file on disk ---
        scan = _load_best_scan_payload()
        if not scan:
            return "❌ Eligible API yanıt vermiyor ve scan dosyası bulunamadı."

        stage2 = scan.get("stage2", {}) or {}
        eligible_pairs = [(sym, info) for sym, info in stage2.items() if isinstance(info, dict) and info.get("eligible")]
        stage1 = scan.get("stage1", {}) or {}
        stage1_done = len(stage1) if isinstance(stage1, dict) else 0
        stage1_passed = sum(1 for v in stage1.values() if isinstance(v, dict) and v.get("passed")) if isinstance(stage1, dict) else 0
        updated_at = str(scan.get("updated_at") or scan.get("scan_started") or "?")[:16]

        if not eligible_pairs:
            # No eligible picks yet — report scan progress instead.
            return (
                "🔄 <b>BIST Tarama durumu</b>\n\n"
                f"Stage 1: {stage1_passed}/{stage1_done or '?'} gecti\n"
                f"Stage 2: {len(stage2)} tamamlandi\n"
                f"Güncelleme: <code>{updated_at} UTC</code>"
            )

        lines = [
            f"📋 <b>Eligible Hisseler</b> [{updated_at}]",
            f"Toplam: <b>{len(eligible_pairs)}</b> hisse",
            "",
        ]
        for sym, item in eligible_pairs[:20]:
            sharpe = float(item.get("sharpe", 0) or 0)
            # NOTE(review): here hit_rate is used as-is, while the API path
            # below multiplies by 100 — presumably the scan file already
            # stores a percentage; confirm against the scanner output.
            hit_rate = float(item.get("hit_rate", 0) or 0)
            total_return = float(item.get("total_return_pct", 0) or 0)
            lines.append(f"• <code>{sym:<8}</code> Sharpe:{sharpe:>5.2f} Hit:%{hit_rate:>4.0f} Ret:{total_return:>+5.1f}%")
        return "\n".join(lines)

    # --- Primary path: local eligible API ---
    scan_running = bool(data.get("scanRunning"))
    progress = data.get("scanProgress", "")
    updated_at = str(data.get("updatedAt") or data.get("timestamp") or "?")[:16]
    eligible = data.get("eligible", []) or []

    if scan_running and not eligible:
        return (
            "🔄 <b>BIST Tarama devam ediyor</b>\n\n"
            f"{progress}\n"
            f"Güncelleme: <code>{updated_at} UTC</code>"
        )

    if not eligible:
        return (
            "📋 <b>Eligible Hisseler</b>\n\n"
            "Uygun hisse bulunamadı.\n"
            f"Durum: <code>{progress or 'hazır değil'}</code>"
        )

    lines = [
        f"📋 <b>Eligible Hisseler</b> [{updated_at}]\n"
        f"Toplam: <b>{len(eligible)}</b> hisse\n"
    ]
    for item in eligible[:20]:
        sym = item.get("symbol", "?")
        sharpe = float(item.get("sharpe", 0) or 0)
        # API values are fractions → convert to percent for display.
        hit_rate = float(item.get("hit_rate", 0) or 0) * 100
        annual_return = float(item.get("annual_return", 0) or 0) * 100
        lines.append(
            f"• <code>{sym:<8}</code> Sharpe:{sharpe:>5.2f} Hit:%{hit_rate:>4.0f} Ret:{annual_return:>+5.1f}%"
        )

    if scan_running:
        lines.append(f"\n🔄 {progress}")

    return "\n".join(lines)
|
| 449 |
|
| 450 |
|
| 451 |
def cmd_sinyaller() -> str:
|
|
|
|
| 452 |
today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
|
| 453 |
signals = _db_get_signals(today)
|
| 454 |
|
|
|
|
| 456 |
return (
|
| 457 |
f"📡 <b>Bugünkü Sinyaller</b> [{today}]\n\n"
|
| 458 |
"Henüz sinyal üretilmedi.\n"
|
| 459 |
+
"Sinyaller sabah 10:30'da üretilir."
|
| 460 |
)
|
| 461 |
|
| 462 |
+
buys = [s for s in signals if s.get("signal") == "BUY"]
|
| 463 |
sells = [s for s in signals if s.get("signal") == "SELL"]
|
| 464 |
holds = [s for s in signals if s.get("signal") == "HOLD"]
|
| 465 |
|
| 466 |
lines = [
|
| 467 |
f"📡 <b>Sinyaller</b> [{today}]\n"
|
| 468 |
+
f"🟢 BUY:{len(buys)} 🔴 SELL:{len(sells)} ⚪ HOLD:{len(holds)}\n"
|
| 469 |
]
|
| 470 |
|
| 471 |
if buys:
|
| 472 |
lines.append("🟢 <b>BUY Sinyalleri:</b>")
|
| 473 |
+
for s in buys[:10]:
|
| 474 |
+
conf = float(s.get("confidence", 0)) * 100
|
| 475 |
pred = float(s.get("predicted_return", 0))
|
|
|
|
|
|
|
|
|
|
| 476 |
lines.append(
|
| 477 |
f" <code>{s['symbol']:<8}</code> "
|
| 478 |
+
f"Güven:%{conf:>4.0f} Tahmin:{pred:>+5.1f}%"
|
|
|
|
| 479 |
)
|
| 480 |
|
| 481 |
if sells:
|
| 482 |
lines.append("\n🔴 <b>SELL Sinyalleri:</b>")
|
| 483 |
+
for s in sells[:5]:
|
| 484 |
+
conf = float(s.get("confidence", 0)) * 100
|
| 485 |
+
lines.append(f" <code>{s['symbol']:<8}</code> Güven:%{conf:>4.0f}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 486 |
|
| 487 |
return "\n".join(lines)
|
| 488 |
|
| 489 |
|
| 490 |
+
def cmd_yardim() -> str:
    """Static /yardim (help) text listing every supported command."""
    sections = [
        "🤖 <b>Borsanova Komutları</b>",
        "",
        "<b>BIST:</b>",
        "/durum — Sistem ve worker durumu",
        "/portfoy — Açık pozisyonlar",
        "/hisseler — Eligible hisse listesi (scanner)",
        "/sinyaller — Bugünkü BUY/SELL sinyalleri",
        "",
        "<b>ABD Borsası:</b>",
        "/us_durum — US worker durumu",
        "/us_portfoy — US açık pozisyonlar",
        "/us_hisseler — US eligible hisseler",
        "/us_sinyaller — US sinyalleri",
        "",
        "/yardim — Bu mesaj",
        "",
        "<i>Bu bot worker bağımsız çalışır.</i>",
    ]
    return "\n".join(sections)
|
| 506 |
|
|
|
|
|
|
|
|
|
|
| 507 |
|
| 508 |
+
# ---------------------------------------------------------------------------
|
| 509 |
+
# US Market Commands
|
| 510 |
+
# ---------------------------------------------------------------------------
|
|
|
|
|
|
|
|
|
|
|
|
|
| 511 |
|
| 512 |
+
def _us_db_get_signals(date: str, limit: int = 50) -> list:
    """Return signal rows for *date* (highest confidence first) from the US db.

    Best-effort: any error (missing db, missing table) yields an empty list.
    Fix: the sqlite connection previously leaked when execute() raised —
    it is now closed in a ``finally`` block.
    """
    try:
        import sqlite3
        if not _US_DB_PATH.exists():
            return []
        conn = sqlite3.connect(str(_US_DB_PATH))
        try:
            conn.row_factory = sqlite3.Row
            rows = conn.execute(
                "SELECT * FROM signals_log WHERE date = ? ORDER BY confidence DESC LIMIT ?",
                (date, limit),
            ).fetchall()
            return [dict(r) for r in rows]
        finally:
            conn.close()
    except Exception:
        return []
|
| 527 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 528 |
|
| 529 |
+
def _us_db_state_map() -> dict:
    """Read the key/value ``state`` table from the US db, JSON-decoding values.

    Values that are not valid JSON are kept as raw strings.
    Best-effort: any error yields an empty dict.
    Fix: the sqlite connection previously leaked when execute() raised —
    it is now closed in a ``finally`` block.
    """
    try:
        import sqlite3
        if not _US_DB_PATH.exists():
            return {}
        conn = sqlite3.connect(str(_US_DB_PATH))
        try:
            rows = conn.execute("SELECT key, value FROM state").fetchall()
        finally:
            conn.close()
        parsed = {}
        for key, value in rows:
            try:
                parsed[key] = json.loads(value)
            except Exception:
                parsed[key] = value
        return parsed
    except Exception:
        return {}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 546 |
|
|
|
|
| 547 |
|
| 548 |
+
def _us_db_get_portfolio() -> dict:
    """US portfolio snapshot: prefer state.json, fall back to the sqlite state table."""
    state = _read_json(_US_STATE_FILE) or _us_db_state_map()
    return {
        "cash": state.get("broker_cash", 100_000),
        "positions": state.get("broker_positions", {}),
    }
|
| 556 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 557 |
|
| 558 |
+
def _us_is_worker_running() -> bool:
    """True when the US worker PID file points at a live process."""
    pid_file = _US_DIR / "auto_trader" / "worker.pid"
    if not pid_file.exists():
        return False
    try:
        # Signal 0 delivers nothing — it only checks process existence.
        os.kill(int(pid_file.read_text().strip()), 0)
    except Exception:
        return False
    return True
|
| 568 |
+
|
| 569 |
+
|
| 570 |
+
def cmd_us_durum() -> str:
    """Build the /us_durum reply: US worker liveness + trading state summary.

    Mirrors cmd_durum for the US market: reads status.json/state.json
    (sqlite state table as fallback) and formats an HTML message.
    """
    status = _read_json(_US_STATUS_FILE)
    state = _read_json(_US_STATE_FILE)
    if not state:
        state = _us_db_state_map()

    worker_running = _us_is_worker_running()
    is_running = status.get("is_running", False)
    last_run = state.get("last_run_date", "—")
    total_trades = state.get("total_trades", 0)
    total_days = state.get("total_days_run", 0)
    phase = status.get("current_phase") or status.get("status", "—")
    cash = state.get("broker_cash", 100_000)

    now_str = datetime.now(timezone.utc).strftime("%H:%M UTC")
    lines = [
        f"🇺🇸 <b>US Sistem Durumu</b> [{now_str}]",
        "",
        f"Worker: {'🟢 Çalışıyor' if worker_running else '🔴 Durdu'}",
        f"Aktif Döngü: {'✅ Evet' if is_running else '—'}",
        f"Son Çalışma: <code>{last_run}</code>",
        f"Faz: <code>{phase}</code>",
        "",
    ]
    # Fix: guard against non-numeric broker_cash (consistent with cmd_durum);
    # previously a string value crashed the ,.0f format spec.
    if isinstance(cash, (int, float)):
        lines.append(f"💰 Nakit: <code>${cash:,.0f}</code>")
    else:
        lines.append("💰 Nakit: <code>bilinmiyor</code>")
    lines.append(f"📈 Toplam İşlem: <code>{total_trades}</code>")
    lines.append(f"📅 Çalışılan Gün: <code>{total_days}</code>")
    return "\n".join(lines)
|
|
|
|
| 598 |
|
|
|
|
|
|
|
| 599 |
|
| 600 |
+
def cmd_us_portfoy() -> str:
    """Format the /us_portfoy reply: open US positions plus cash/equity."""
    snapshot = _us_db_get_portfolio()
    cash = snapshot["cash"]
    positions = snapshot["positions"]

    if not positions:
        return (
            f"🇺🇸 <b>US Portföy</b>\n\n"
            f"Açık pozisyon yok.\n"
            f"💰 Nakit: <code>${cash:,.0f}</code>"
        )

    reply = [f"🇺🇸 <b>US Portföy</b> ({len(positions)} pozisyon)\n"]
    # NOTE: equity below only includes the displayed (first 15) positions.
    notional_sum = 0.0
    for ticker, pos in list(positions.items())[:15]:
        qty = pos.get("qty", 0)
        avg = pos.get("avg_cost", 0)
        value = qty * avg
        notional_sum += value
        reply.append(f"• <code>{ticker}</code> {qty} adet @ ${avg:.2f} (${value:,.0f})")

    if len(positions) > 15:
        reply.append(f"  ... ve {len(positions) - 15} pozisyon daha")

    reply.append(f"\n💰 Nakit: <code>${cash:,.0f}</code>")
    reply.append(f"📊 Toplam Değer: <code>${cash + notional_sum:,.0f}</code>")
    return "\n".join(reply)
|
| 628 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 629 |
|
| 630 |
+
def cmd_us_hisseler() -> str:
    """Build the /us_hisseler reply: eligible US stocks.

    Primary source is the local eligible API; when it is down, falls back
    to reading the US scan results file directly.
    """
    data = _api_get_json("http://localhost:7860/api/eligible?market=us")
    if not data:
        # Fallback: read scan file directly
        if _US_SCAN_FILE.exists():
            try:
                scan = json.loads(_US_SCAN_FILE.read_text())
                stage2 = scan.get("stage2", {}) or {}
                eligible_pairs = [(sym, info) for sym, info in stage2.items()
                                  if isinstance(info, dict) and info.get("eligible")]
                updated_at = str(scan.get("updated_at") or scan.get("scan_finished") or "?")[:16]

                if not eligible_pairs:
                    return "🇺🇸 <b>US Eligible</b>\n\nHenüz eligible hisse yok."

                lines = [
                    f"🇺🇸 <b>US Eligible Hisseler</b> [{updated_at}]",
                    f"Toplam: <b>{len(eligible_pairs)}</b> hisse\n",
                ]
                for sym, item in eligible_pairs[:20]:
                    sharpe = float(item.get("sharpe", 0) or 0)
                    # NOTE(review): hit_rate used as-is here but *100 in the
                    # API path below — presumably the scan file already stores
                    # a percentage; confirm against the scanner output.
                    hit_rate = float(item.get("hit_rate", 0) or 0)
                    total_return = float(item.get("total_return_pct", 0) or 0)
                    lines.append(f"• <code>{sym:<8}</code> Sharpe:{sharpe:>5.2f} Hit:%{hit_rate:>4.0f} Ret:{total_return:>+5.1f}%")
                return "\n".join(lines)
            except Exception:
                # Unreadable scan file — fall through to the generic message.
                pass
        return "🇺🇸 <b>US Eligible</b>\n\nHenüz US taraması yapılmadı veya veri yok."

    eligible = data.get("eligible", []) or []
    updated_at = str(data.get("updatedAt") or data.get("timestamp") or "?")[:16]

    if not eligible:
        return "🇺🇸 <b>US Eligible</b>\n\nUygun hisse bulunamadı."

    lines = [
        f"🇺🇸 <b>US Eligible Hisseler</b> [{updated_at}]\n"
        f"Toplam: <b>{len(eligible)}</b> hisse\n"
    ]
    for item in eligible[:20]:
        sym = item.get("symbol", "?")
        sharpe = float(item.get("sharpe", 0) or 0)
        # API values are fractions → convert to percent for display.
        hit_rate = float(item.get("hit_rate", 0) or 0) * 100
        annual_return = float(item.get("annual_return", 0) or 0) * 100
        lines.append(f"• <code>{sym:<8}</code> Sharpe:{sharpe:>5.2f} Hit:%{hit_rate:>4.0f} Ret:{annual_return:>+5.1f}%")
    return "\n".join(lines)
|
| 676 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 677 |
|
| 678 |
+
def cmd_us_sinyaller() -> str:
    """Format today's US BUY/SELL/HOLD signals from the US trading db."""
    today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    signals = _us_db_get_signals(today)

    if not signals:
        return (
            f"🇺🇸 <b>US Sinyaller</b> [{today}]\n\n"
            "Henüz sinyal üretilmedi."
        )

    # Bucket signals by kind in a single pass.
    by_kind = {"BUY": [], "SELL": [], "HOLD": []}
    for sig in signals:
        kind = sig.get("signal")
        if kind in by_kind:
            by_kind[kind].append(sig)
    buys, sells, holds = by_kind["BUY"], by_kind["SELL"], by_kind["HOLD"]

    out = [
        f"🇺🇸 <b>US Sinyaller</b> [{today}]\n"
        f"🟢 BUY:{len(buys)} 🔴 SELL:{len(sells)} ⚪ HOLD:{len(holds)}\n"
    ]

    if buys:
        out.append("🟢 <b>BUY Sinyalleri:</b>")
        for sig in buys[:10]:
            conf = float(sig.get("confidence", 0)) * 100
            pred = float(sig.get("predicted_return", 0))
            out.append(f"  <code>{sig['symbol']:<8}</code> Güven:%{conf:>4.0f} Tahmin:{pred:>+5.1f}%")

    if sells:
        out.append("\n🔴 <b>SELL Sinyalleri:</b>")
        for sig in sells[:5]:
            conf = float(sig.get("confidence", 0)) * 100
            out.append(f"  <code>{sig['symbol']:<8}</code> Güven:%{conf:>4.0f}")

    return "\n".join(out)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 711 |
|
|
|
|
| 712 |
|
| 713 |
+
# ---------------------------------------------------------------------------
|
| 714 |
+
# Scan watcher — proaktif bildirim (tarama başladı / tamamlandı)
|
| 715 |
+
# ---------------------------------------------------------------------------
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 716 |
|
| 717 |
+
def _scan_watcher() -> None:
    """Background loop: sends Telegram messages when a BIST scan starts/finishes.

    Polls the local eligible API and notifies on the two state transitions:
    not-running -> running (scan started) and running -> not-running
    (scan finished or failed). Runs forever; intended for a daemon thread.
    """
    LOCAL_API = "http://localhost:7860/api/eligible?market=bist"
    CHECK_INTERVAL = 60  # seconds between polls

    was_running = False
    initialized = False
    logger.info("[scan_watcher] Başlatıldı — %ds aralıkla kontrol", CHECK_INTERVAL)

    while True:
        try:
            req = urllib.request.Request(LOCAL_API, headers={"Accept": "application/json"})
            with urllib.request.urlopen(req, timeout=10) as resp:
                data = json.loads(resp.read().decode())

            is_running = bool(data.get("scanRunning", False))

            # First successful poll only records the baseline — avoids a
            # spurious notification right after the bot starts.
            if not initialized:
                was_running = is_running
                initialized = True
                time.sleep(CHECK_INTERVAL)
                continue

            if not was_running and is_running:
                # Scan started
                universe = data.get("universe", "BIST100")
                send(f"🔍 <b>BIST Tarama başladı</b>\nEvren: <code>{universe.upper()}</code>\n⏳ Tamamlanması birkaç saat sürebilir.")
                logger.info("[scan_watcher] Tarama başladı bildirimi gönderildi")

            elif was_running and not is_running:
                # Scan finished — heuristic error detection via progress text.
                eligible_count = data.get("eligibleCount") or 0
                progress = data.get("scanProgress", "")
                if "hata" in progress.lower() or "error" in progress.lower():
                    send(f"❌ <b>BIST Tarama hatası</b>\n<code>{progress}</code>")
                    logger.info("[scan_watcher] Tarama hata bildirimi: %s", progress)
                else:
                    updated = data.get("updatedAt", "")[:16] if data.get("updatedAt") else "?"
                    send(
                        f"✅ <b>BIST Tarama tamamlandı</b>\n"
                        f"Eligible: <b>{eligible_count}</b> hisse\n"
                        f"Tamamlandı: <code>{updated} UTC</code>\n"
                        f"/hisseler — listeyi görmek için"
                    )
                    logger.info("[scan_watcher] Tarama bitti bildirimi: %d eligible", eligible_count)

            was_running = is_running

        except Exception as e:
            # Expected while the local API is still booting.
            logger.debug("[scan_watcher] API hatası (normal başlangıçta): %s", e)

        time.sleep(CHECK_INTERVAL)
|
|
|
|
|
|
|
| 769 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 770 |
|
| 771 |
+
def _us_scan_watcher() -> None:
    """US market scan watcher — proactive notifications.

    Polls the local eligible API and sends a Telegram message on the
    not-running -> running (scan started) and running -> not-running
    (scan finished / failed) transitions. Runs forever; daemon thread.
    """
    LOCAL_API = "http://localhost:7860/api/eligible?market=us"
    CHECK_INTERVAL = 60  # seconds between polls

    was_running = False
    initialized = False
    logger.info("[us_scan_watcher] Started — %ds interval", CHECK_INTERVAL)

    while True:
        try:
            req = urllib.request.Request(LOCAL_API, headers={"Accept": "application/json"})
            with urllib.request.urlopen(req, timeout=10) as resp:
                data = json.loads(resp.read().decode())

            is_running = bool(data.get("scanRunning", False))

            # First successful poll only records the baseline — avoids a
            # spurious notification right after the bot starts.
            if not initialized:
                was_running = is_running
                initialized = True
                time.sleep(CHECK_INTERVAL)
                continue

            if not was_running and is_running:
                send("🇺🇸🔍 <b>US Tarama başladı</b>\n⏳ Tamamlanması birkaç saat sürebilir.")
                logger.info("[us_scan_watcher] US scan started notification sent")

            elif was_running and not is_running:
                eligible_count = data.get("eligibleCount") or 0
                progress = data.get("scanProgress", "")
                # Heuristic error detection via progress text (hata/error).
                if "hata" in progress.lower() or "error" in progress.lower():
                    send(f"🇺🇸❌ <b>US Tarama hatası</b>\n<code>{progress}</code>")
                else:
                    updated = data.get("updatedAt", "")[:16] if data.get("updatedAt") else "?"
                    send(
                        f"🇺🇸✅ <b>US Tarama tamamlandı</b>\n"
                        f"Eligible: <b>{eligible_count}</b> hisse\n"
                        f"Tamamlandı: <code>{updated} UTC</code>\n"
                        f"/us_hisseler — listeyi görmek için"
                    )
                    logger.info("[us_scan_watcher] US scan done: %d eligible", eligible_count)

            was_running = is_running

        except Exception as e:
            # Expected while the local API is still booting.
            logger.debug("[us_scan_watcher] API error: %s", e)

        time.sleep(CHECK_INTERVAL)
|
|
|
|
|
|
|
|
|
|
|
|
|
| 819 |
|
| 820 |
|
| 821 |
+
# ---------------------------------------------------------------------------
|
| 822 |
+
# Main polling loop
|
| 823 |
+
# ---------------------------------------------------------------------------
|
| 824 |
+
|
| 825 |
+
# Command → handler dispatch table; /help and /start alias /yardim.
COMMANDS = {
    "/durum": cmd_durum,
    "/portfoy": cmd_portfoy,
    "/hisseler": cmd_hisseler,
    "/sinyaller": cmd_sinyaller,
    "/us_durum": cmd_us_durum,
    "/us_portfoy": cmd_us_portfoy,
    "/us_hisseler": cmd_us_hisseler,
    "/us_sinyaller": cmd_us_sinyaller,
    "/yardim": cmd_yardim,
    "/help": cmd_yardim,
    "/start": cmd_yardim,
}
|
| 838 |
+
|
| 839 |
|
| 840 |
def main() -> None:
|
| 841 |
+
import threading
|
| 842 |
+
_resolve_api_base(force=True)
|
| 843 |
logger.info("Telegram bot başlatıldı (chat_id=%s)", CHAT_ID)
|
| 844 |
+
send("🤖 Borsanova Bot aktif!\n/yardim — komutları görmek için")
|
| 845 |
+
|
| 846 |
+
# Scan watcher arka planda başlat
|
| 847 |
+
watcher = threading.Thread(target=_scan_watcher, daemon=True, name="scan-watcher")
|
| 848 |
+
watcher.start()
|
| 849 |
+
|
| 850 |
+
# US scan watcher
|
| 851 |
+
us_watcher = threading.Thread(target=_us_scan_watcher, daemon=True, name="us-scan-watcher")
|
| 852 |
+
us_watcher.start()
|
| 853 |
|
| 854 |
offset = 0
|
| 855 |
while True:
|
|
|
|
| 862 |
if str(msg.get("chat", {}).get("id", "")) != str(CHAT_ID):
|
| 863 |
continue
|
| 864 |
|
| 865 |
+
text = msg.get("text", "").strip().lower().split()[0] if msg.get("text") else ""
|
| 866 |
if not text:
|
| 867 |
continue
|
| 868 |
|
| 869 |
logger.info("Komut: %s", text)
|
| 870 |
+
handler = COMMANDS.get(text)
|
| 871 |
+
if handler:
|
| 872 |
+
try:
|
| 873 |
+
reply = handler()
|
| 874 |
+
send(reply)
|
| 875 |
+
except Exception as e:
|
| 876 |
+
logger.error("Handler hatası (%s): %s", text, e)
|
| 877 |
+
send(f"❌ Komut işlenirken hata: {e}")
|
| 878 |
+
else:
|
| 879 |
+
send(
|
| 880 |
+
f"❓ Bilinmeyen komut: <code>{text}</code>\n"
|
| 881 |
+
"/yardim — komut listesi"
|
| 882 |
+
)
|
| 883 |
|
| 884 |
if not updates:
|
| 885 |
time.sleep(1)
|
huggingface-space/test_smoke.py
CHANGED
|
@@ -155,7 +155,7 @@ def main() -> int:
|
|
| 155 |
from trading.security import LogSanitizer, check_env_safety, get_production_config
|
| 156 |
sanitizer = LogSanitizer()
|
| 157 |
# Test redaction
|
| 158 |
-
test_msg = "key=
|
| 159 |
sanitized = sanitizer._sanitize(test_msg)
|
| 160 |
assert "sk-abc123" not in sanitized, f"Key not redacted: {sanitized}"
|
| 161 |
assert "REDACTED" in sanitized
|
|
|
|
| 155 |
from trading.security import LogSanitizer, check_env_safety, get_production_config
|
| 156 |
sanitizer = LogSanitizer()
|
| 157 |
# Test redaction
|
| 158 |
+
test_msg = "key=sk-abc123def456ghi789jkl012mno345pqr678"
|
| 159 |
sanitized = sanitizer._sanitize(test_msg)
|
| 160 |
assert "sk-abc123" not in sanitized, f"Key not redacted: {sanitized}"
|
| 161 |
assert "REDACTED" in sanitized
|
huggingface-space/trading/__init__.py
CHANGED
|
@@ -7,6 +7,7 @@ from trading.broker_base import ( # noqa: F401
|
|
| 7 |
OrderType,
|
| 8 |
PaperBroker,
|
| 9 |
SlippageModel,
|
|
|
|
| 10 |
)
|
| 11 |
from trading.monitoring import ( # noqa: F401
|
| 12 |
AlertLevel,
|
|
|
|
| 7 |
OrderType,
|
| 8 |
PaperBroker,
|
| 9 |
SlippageModel,
|
| 10 |
+
make_slippage_model,
|
| 11 |
)
|
| 12 |
from trading.monitoring import ( # noqa: F401
|
| 13 |
AlertLevel,
|
huggingface-space/trading/auto_trader.py
CHANGED
|
@@ -45,20 +45,19 @@ from trading.broker_base import (
|
|
| 45 |
OrderType,
|
| 46 |
PaperBroker,
|
| 47 |
SlippageModel,
|
| 48 |
-
|
| 49 |
)
|
| 50 |
from trading.trade_journal import TradeJournal
|
| 51 |
from trading.monitoring import KillSwitch
|
| 52 |
|
| 53 |
# New production modules
|
| 54 |
-
from trading.risk_gate import RiskGate, RiskLimits
|
| 55 |
from trading.circuit_breaker import CircuitBreaker
|
| 56 |
from trading.db_store import TradingStore
|
| 57 |
from trading.model_risk import ModelRiskManager
|
| 58 |
from trading.market_registry import (
|
| 59 |
DEFAULT_MARKET_ID,
|
| 60 |
describe_market_window,
|
| 61 |
-
get_market_config,
|
| 62 |
get_market_storage_dir,
|
| 63 |
get_scan_results_path,
|
| 64 |
get_trading_db_path,
|
|
@@ -90,7 +89,7 @@ def _status_file(market_id: str = DEFAULT_MARKET_ID) -> Path:
|
|
| 90 |
DEFAULT_INITIAL_CASH = 100_000.0
|
| 91 |
MAX_POSITIONS = 5
|
| 92 |
POSITION_SIZE_PCT = 15.0 # % of equity per position
|
| 93 |
-
MIN_CONFIDENCE =
|
| 94 |
DAYS_AHEAD = 7
|
| 95 |
STOP_LOSS_PCT = 5.0 # -5% stop loss
|
| 96 |
TAKE_PROFIT_PCT = 10.0 # +10% take profit
|
|
@@ -237,7 +236,6 @@ def run_trading_cycle(
|
|
| 237 |
"""
|
| 238 |
today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
|
| 239 |
cycle_start = time.time()
|
| 240 |
-
_currency = get_market_config(market_id).currency
|
| 241 |
|
| 242 |
journal = TradeJournal()
|
| 243 |
kill_switch = KillSwitch(path=get_market_storage_dir(market_id) / ".kill_switch")
|
|
@@ -247,7 +245,7 @@ def run_trading_cycle(
|
|
| 247 |
store = TradingStore(db_path=str(get_trading_db_path(market_id)))
|
| 248 |
model_risk = ModelRiskManager()
|
| 249 |
risk_gate = RiskGate(
|
| 250 |
-
limits=
|
| 251 |
monitor=None,
|
| 252 |
kill_switch=kill_switch,
|
| 253 |
alert_manager=None,
|
|
@@ -287,17 +285,13 @@ def run_trading_cycle(
|
|
| 287 |
effective_cash = saved_cash if saved_cash is not None else initial_cash
|
| 288 |
broker = PaperBroker(
|
| 289 |
initial_cash=effective_cash,
|
| 290 |
-
slippage_model=
|
| 291 |
market_id=market_id,
|
| 292 |
)
|
| 293 |
if saved_cash is not None:
|
| 294 |
broker._cash = saved_cash
|
| 295 |
broker._positions = state.get("broker_positions", {})
|
| 296 |
|
| 297 |
-
# Seed peak equity so drawdown protection is active from the start
|
| 298 |
-
_initial_account = broker.get_account_info()
|
| 299 |
-
risk_gate.record_equity(_initial_account["equity"])
|
| 300 |
-
|
| 301 |
# Get eligible symbols
|
| 302 |
symbols = _get_eligible_symbols(market_id=market_id)
|
| 303 |
if not symbols:
|
|
@@ -310,7 +304,7 @@ def run_trading_cycle(
|
|
| 310 |
logger.info("=" * 60)
|
| 311 |
logger.info("AUTO TRADER — Trading Cycle %s", today)
|
| 312 |
logger.info("Eligible stocks: %s", symbols)
|
| 313 |
-
logger.info("Cash: %.2f
|
| 314 |
logger.info("=" * 60)
|
| 315 |
|
| 316 |
# Update status
|
|
@@ -397,6 +391,7 @@ def run_trading_cycle(
|
|
| 397 |
for sym, pos in list(broker._positions.items()):
|
| 398 |
try:
|
| 399 |
from data.stock_data_api import get_stock_data_for_api
|
|
|
|
| 400 |
df_px = get_stock_data_for_api(sym, period="5d", interval="1d", market_id=market_id)
|
| 401 |
if df_px is None or df_px.empty:
|
| 402 |
continue
|
|
@@ -494,9 +489,9 @@ def run_trading_cycle(
|
|
| 494 |
"commission": round(fill.commission, 2),
|
| 495 |
})
|
| 496 |
logger.info(
|
| 497 |
-
"SOLD %s: %d @ %.2f
|
| 498 |
sym, fill.filled_qty, fill.avg_fill_price,
|
| 499 |
-
|
| 500 |
)
|
| 501 |
else:
|
| 502 |
signal_actions[sym] = "SELL_REJECTED"
|
|
@@ -555,6 +550,7 @@ def run_trading_cycle(
|
|
| 555 |
from data.stock_data_api import get_stock_data_for_api as _fetch_px
|
| 556 |
for _ex_sym in broker._positions:
|
| 557 |
try:
|
|
|
|
| 558 |
_ex_df = _fetch_px(_ex_sym, period="6mo", interval="1d", market_id=market_id)
|
| 559 |
if _ex_df is not None and not _ex_df.empty:
|
| 560 |
_position_returns[_ex_sym] = _ex_df["Close"].pct_change().dropna()
|
|
@@ -563,6 +559,8 @@ def run_trading_cycle(
|
|
| 563 |
if _position_returns:
|
| 564 |
logger.info("Correlation check: cached returns for %d positions", len(_position_returns))
|
| 565 |
|
|
|
|
|
|
|
| 566 |
for _, row in buy_signals.head(slots_available).iterrows():
|
| 567 |
sym = row["symbol"]
|
| 568 |
if sym in broker._positions:
|
|
@@ -578,6 +576,7 @@ def run_trading_cycle(
|
|
| 578 |
if _position_returns:
|
| 579 |
try:
|
| 580 |
from data.stock_data_api import get_stock_data_for_api as _fetch_px2
|
|
|
|
| 581 |
_new_df = _fetch_px2(sym, period="6mo", interval="1d", market_id=market_id)
|
| 582 |
_too_corr = False
|
| 583 |
if _new_df is not None and not _new_df.empty:
|
|
@@ -594,7 +593,6 @@ def run_trading_cycle(
|
|
| 594 |
)
|
| 595 |
break
|
| 596 |
if _too_corr:
|
| 597 |
-
signal_actions[sym] = "BUY_SKIPPED_CORRELATED"
|
| 598 |
continue
|
| 599 |
except Exception as exc:
|
| 600 |
logger.warning("Correlation check failed for %s: %s", sym, exc)
|
|
@@ -606,7 +604,6 @@ def run_trading_cycle(
|
|
| 606 |
qty = int(max_notional / close_px)
|
| 607 |
|
| 608 |
if qty <= 0:
|
| 609 |
-
signal_actions[sym] = "BUY_SKIPPED_ZERO_QTY"
|
| 610 |
continue
|
| 611 |
|
| 612 |
if not dry_run:
|
|
@@ -643,6 +640,7 @@ def run_trading_cycle(
|
|
| 643 |
|
| 644 |
# Update correlation cache so next candidates check against this too
|
| 645 |
try:
|
|
|
|
| 646 |
_new_df2 = _fetch_px(sym, period="6mo", interval="1d", market_id=market_id)
|
| 647 |
if _new_df2 is not None and not _new_df2.empty:
|
| 648 |
_position_returns[sym] = _new_df2["Close"].pct_change().dropna()
|
|
@@ -681,9 +679,9 @@ def run_trading_cycle(
|
|
| 681 |
"commission": round(fill.commission, 2),
|
| 682 |
})
|
| 683 |
logger.info(
|
| 684 |
-
"BOUGHT %s: %d @ %.2f
|
| 685 |
sym, fill.filled_qty, fill.avg_fill_price,
|
| 686 |
-
|
| 687 |
)
|
| 688 |
else:
|
| 689 |
signal_actions[sym] = "BUY_REJECTED"
|
|
@@ -744,6 +742,7 @@ def run_trading_cycle(
|
|
| 744 |
# Final status
|
| 745 |
result = {
|
| 746 |
"date": today,
|
|
|
|
| 747 |
"status": "OK",
|
| 748 |
"elapsed_sec": round(elapsed, 1),
|
| 749 |
"signals_generated": len(df_signals),
|
|
@@ -800,10 +799,7 @@ def run_trading_cycle(
|
|
| 800 |
# Continuous mode
|
| 801 |
# ---------------------------------------------------------------------------
|
| 802 |
|
| 803 |
-
def run_continuous(
|
| 804 |
-
initial_cash: float = DEFAULT_INITIAL_CASH,
|
| 805 |
-
market_id: str = DEFAULT_MARKET_ID,
|
| 806 |
-
) -> None:
|
| 807 |
"""Run auto-trader in continuous loop.
|
| 808 |
|
| 809 |
Checks every hour, trades once per day.
|
|
@@ -811,15 +807,13 @@ def run_continuous(
|
|
| 811 |
"""
|
| 812 |
logger.info("Starting continuous auto-trader...")
|
| 813 |
logger.info("Will check every hour, trade once per day")
|
|
|
|
| 814 |
logger.info("Press Ctrl+C to stop")
|
| 815 |
|
| 816 |
while True:
|
| 817 |
try:
|
| 818 |
now = datetime.now(timezone.utc)
|
| 819 |
-
|
| 820 |
-
|
| 821 |
-
# Trade between 07:00-16:00 UTC (10:00-19:00 Istanbul)
|
| 822 |
-
if is_market_open_window(market_id):
|
| 823 |
state = _load_state(market_id)
|
| 824 |
today = now.strftime("%Y-%m-%d")
|
| 825 |
|
|
@@ -830,7 +824,7 @@ def run_continuous(
|
|
| 830 |
else:
|
| 831 |
logger.debug("Already ran today, waiting...")
|
| 832 |
else:
|
| 833 |
-
logger.debug("Outside market
|
| 834 |
|
| 835 |
# Sleep 1 hour
|
| 836 |
time.sleep(3600)
|
|
@@ -847,12 +841,10 @@ def run_continuous(
|
|
| 847 |
# Reset
|
| 848 |
# ---------------------------------------------------------------------------
|
| 849 |
|
| 850 |
-
def reset_portfolio(
|
| 851 |
-
initial_cash: float = DEFAULT_INITIAL_CASH,
|
| 852 |
-
market_id: str = DEFAULT_MARKET_ID,
|
| 853 |
-
) -> None:
|
| 854 |
"""Reset all auto-trader state for a fresh start."""
|
| 855 |
state = {
|
|
|
|
| 856 |
"created_at": datetime.now(timezone.utc).isoformat(),
|
| 857 |
"initial_cash": initial_cash,
|
| 858 |
"broker_cash": initial_cash,
|
|
@@ -865,8 +857,9 @@ def reset_portfolio(
|
|
| 865 |
}
|
| 866 |
_save_state(state, market_id=market_id)
|
| 867 |
|
| 868 |
-
# Clear journal files
|
| 869 |
-
|
|
|
|
| 870 |
for f in journal_dir.glob("*.jsonl"):
|
| 871 |
try:
|
| 872 |
f.unlink()
|
|
@@ -875,7 +868,7 @@ def reset_portfolio(
|
|
| 875 |
# appear/disappear; ignore missing file errors during cleanup.
|
| 876 |
continue
|
| 877 |
|
| 878 |
-
logger.info("Portfolio reset: %.0f
|
| 879 |
|
| 880 |
|
| 881 |
# ---------------------------------------------------------------------------
|
|
@@ -895,12 +888,12 @@ def main() -> int:
|
|
| 895 |
p.add_argument("--reset", action="store_true", help="Reset portfolio to fresh start")
|
| 896 |
p.add_argument("--initial-cash", type=float, default=DEFAULT_INITIAL_CASH)
|
| 897 |
p.add_argument("--dry-run", action="store_true", help="Generate signals but don't trade")
|
| 898 |
-
p.add_argument("--market",
|
| 899 |
|
| 900 |
args = p.parse_args()
|
| 901 |
|
| 902 |
if args.status:
|
| 903 |
-
status = get_auto_trader_status(args.market)
|
| 904 |
print(json.dumps(status, indent=2, default=str))
|
| 905 |
|
| 906 |
# Also show performance
|
|
@@ -913,7 +906,7 @@ def main() -> int:
|
|
| 913 |
|
| 914 |
if args.reset:
|
| 915 |
reset_portfolio(args.initial_cash, market_id=args.market)
|
| 916 |
-
print(f"Portfolio reset with {args.initial_cash:,.0f}
|
| 917 |
return 0
|
| 918 |
|
| 919 |
if args.continuous:
|
|
@@ -930,7 +923,7 @@ def main() -> int:
|
|
| 930 |
return 0
|
| 931 |
|
| 932 |
# Default: show status
|
| 933 |
-
status = get_auto_trader_status(args.market)
|
| 934 |
print(json.dumps(status, indent=2, default=str))
|
| 935 |
return 0
|
| 936 |
|
|
|
|
| 45 |
OrderType,
|
| 46 |
PaperBroker,
|
| 47 |
SlippageModel,
|
| 48 |
+
make_slippage_model,
|
| 49 |
)
|
| 50 |
from trading.trade_journal import TradeJournal
|
| 51 |
from trading.monitoring import KillSwitch
|
| 52 |
|
| 53 |
# New production modules
|
| 54 |
+
from trading.risk_gate import RiskGate, RiskLimits
|
| 55 |
from trading.circuit_breaker import CircuitBreaker
|
| 56 |
from trading.db_store import TradingStore
|
| 57 |
from trading.model_risk import ModelRiskManager
|
| 58 |
from trading.market_registry import (
|
| 59 |
DEFAULT_MARKET_ID,
|
| 60 |
describe_market_window,
|
|
|
|
| 61 |
get_market_storage_dir,
|
| 62 |
get_scan_results_path,
|
| 63 |
get_trading_db_path,
|
|
|
|
| 89 |
DEFAULT_INITIAL_CASH = 100_000.0
|
| 90 |
MAX_POSITIONS = 5
|
| 91 |
POSITION_SIZE_PCT = 15.0 # % of equity per position
|
| 92 |
+
MIN_CONFIDENCE = 40 # 0-100 scale — minimum quality gate for trade entry
|
| 93 |
DAYS_AHEAD = 7
|
| 94 |
STOP_LOSS_PCT = 5.0 # -5% stop loss
|
| 95 |
TAKE_PROFIT_PCT = 10.0 # +10% take profit
|
|
|
|
| 236 |
"""
|
| 237 |
today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
|
| 238 |
cycle_start = time.time()
|
|
|
|
| 239 |
|
| 240 |
journal = TradeJournal()
|
| 241 |
kill_switch = KillSwitch(path=get_market_storage_dir(market_id) / ".kill_switch")
|
|
|
|
| 245 |
store = TradingStore(db_path=str(get_trading_db_path(market_id)))
|
| 246 |
model_risk = ModelRiskManager()
|
| 247 |
risk_gate = RiskGate(
|
| 248 |
+
limits=RiskLimits(),
|
| 249 |
monitor=None,
|
| 250 |
kill_switch=kill_switch,
|
| 251 |
alert_manager=None,
|
|
|
|
| 285 |
effective_cash = saved_cash if saved_cash is not None else initial_cash
|
| 286 |
broker = PaperBroker(
|
| 287 |
initial_cash=effective_cash,
|
| 288 |
+
slippage_model=make_slippage_model(market_id),
|
| 289 |
market_id=market_id,
|
| 290 |
)
|
| 291 |
if saved_cash is not None:
|
| 292 |
broker._cash = saved_cash
|
| 293 |
broker._positions = state.get("broker_positions", {})
|
| 294 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 295 |
# Get eligible symbols
|
| 296 |
symbols = _get_eligible_symbols(market_id=market_id)
|
| 297 |
if not symbols:
|
|
|
|
| 304 |
logger.info("=" * 60)
|
| 305 |
logger.info("AUTO TRADER — Trading Cycle %s", today)
|
| 306 |
logger.info("Eligible stocks: %s", symbols)
|
| 307 |
+
logger.info("Cash: %.2f, Positions: %d, Market: %s", broker._cash, len(broker._positions), market_id)
|
| 308 |
logger.info("=" * 60)
|
| 309 |
|
| 310 |
# Update status
|
|
|
|
| 391 |
for sym, pos in list(broker._positions.items()):
|
| 392 |
try:
|
| 393 |
from data.stock_data_api import get_stock_data_for_api
|
| 394 |
+
ticker = to_provider_symbol(sym, market_id=market_id)
|
| 395 |
df_px = get_stock_data_for_api(sym, period="5d", interval="1d", market_id=market_id)
|
| 396 |
if df_px is None or df_px.empty:
|
| 397 |
continue
|
|
|
|
| 489 |
"commission": round(fill.commission, 2),
|
| 490 |
})
|
| 491 |
logger.info(
|
| 492 |
+
"SOLD %s: %d @ %.2f TL, PnL: %+.2f TL (%+.1f%%) [%s]",
|
| 493 |
sym, fill.filled_qty, fill.avg_fill_price,
|
| 494 |
+
pnl, pnl_pct, reason,
|
| 495 |
)
|
| 496 |
else:
|
| 497 |
signal_actions[sym] = "SELL_REJECTED"
|
|
|
|
| 550 |
from data.stock_data_api import get_stock_data_for_api as _fetch_px
|
| 551 |
for _ex_sym in broker._positions:
|
| 552 |
try:
|
| 553 |
+
_ex_tk = to_provider_symbol(_ex_sym, market_id=market_id)
|
| 554 |
_ex_df = _fetch_px(_ex_sym, period="6mo", interval="1d", market_id=market_id)
|
| 555 |
if _ex_df is not None and not _ex_df.empty:
|
| 556 |
_position_returns[_ex_sym] = _ex_df["Close"].pct_change().dropna()
|
|
|
|
| 559 |
if _position_returns:
|
| 560 |
logger.info("Correlation check: cached returns for %d positions", len(_position_returns))
|
| 561 |
|
| 562 |
+
_fetch_px2 = None
|
| 563 |
+
|
| 564 |
for _, row in buy_signals.head(slots_available).iterrows():
|
| 565 |
sym = row["symbol"]
|
| 566 |
if sym in broker._positions:
|
|
|
|
| 576 |
if _position_returns:
|
| 577 |
try:
|
| 578 |
from data.stock_data_api import get_stock_data_for_api as _fetch_px2
|
| 579 |
+
_new_tk = to_provider_symbol(sym, market_id=market_id)
|
| 580 |
_new_df = _fetch_px2(sym, period="6mo", interval="1d", market_id=market_id)
|
| 581 |
_too_corr = False
|
| 582 |
if _new_df is not None and not _new_df.empty:
|
|
|
|
| 593 |
)
|
| 594 |
break
|
| 595 |
if _too_corr:
|
|
|
|
| 596 |
continue
|
| 597 |
except Exception as exc:
|
| 598 |
logger.warning("Correlation check failed for %s: %s", sym, exc)
|
|
|
|
| 604 |
qty = int(max_notional / close_px)
|
| 605 |
|
| 606 |
if qty <= 0:
|
|
|
|
| 607 |
continue
|
| 608 |
|
| 609 |
if not dry_run:
|
|
|
|
| 640 |
|
| 641 |
# Update correlation cache so next candidates check against this too
|
| 642 |
try:
|
| 643 |
+
_new_tk2 = to_provider_symbol(sym, market_id=market_id)
|
| 644 |
_new_df2 = _fetch_px(sym, period="6mo", interval="1d", market_id=market_id)
|
| 645 |
if _new_df2 is not None and not _new_df2.empty:
|
| 646 |
_position_returns[sym] = _new_df2["Close"].pct_change().dropna()
|
|
|
|
| 679 |
"commission": round(fill.commission, 2),
|
| 680 |
})
|
| 681 |
logger.info(
|
| 682 |
+
"BOUGHT %s: %d @ %.2f TL (conf=%.2f, pred=%.1f%%)",
|
| 683 |
sym, fill.filled_qty, fill.avg_fill_price,
|
| 684 |
+
row.get("confidence", 0), row.get("predicted_return_pct", 0),
|
| 685 |
)
|
| 686 |
else:
|
| 687 |
signal_actions[sym] = "BUY_REJECTED"
|
|
|
|
| 742 |
# Final status
|
| 743 |
result = {
|
| 744 |
"date": today,
|
| 745 |
+
"market_id": market_id,
|
| 746 |
"status": "OK",
|
| 747 |
"elapsed_sec": round(elapsed, 1),
|
| 748 |
"signals_generated": len(df_signals),
|
|
|
|
| 799 |
# Continuous mode
|
| 800 |
# ---------------------------------------------------------------------------
|
| 801 |
|
| 802 |
+
def run_continuous(initial_cash: float = DEFAULT_INITIAL_CASH, market_id: str = DEFAULT_MARKET_ID) -> None:
|
|
|
|
|
|
|
|
|
|
| 803 |
"""Run auto-trader in continuous loop.
|
| 804 |
|
| 805 |
Checks every hour, trades once per day.
|
|
|
|
| 807 |
"""
|
| 808 |
logger.info("Starting continuous auto-trader...")
|
| 809 |
logger.info("Will check every hour, trade once per day")
|
| 810 |
+
logger.info("Market window: %s", describe_market_window(market_id))
|
| 811 |
logger.info("Press Ctrl+C to stop")
|
| 812 |
|
| 813 |
while True:
|
| 814 |
try:
|
| 815 |
now = datetime.now(timezone.utc)
|
| 816 |
+
if is_market_open_window(market_id, now_utc=now):
|
|
|
|
|
|
|
|
|
|
| 817 |
state = _load_state(market_id)
|
| 818 |
today = now.strftime("%Y-%m-%d")
|
| 819 |
|
|
|
|
| 824 |
else:
|
| 825 |
logger.debug("Already ran today, waiting...")
|
| 826 |
else:
|
| 827 |
+
logger.debug("Outside market hours for %s, sleeping...", market_id)
|
| 828 |
|
| 829 |
# Sleep 1 hour
|
| 830 |
time.sleep(3600)
|
|
|
|
| 841 |
# Reset
|
| 842 |
# ---------------------------------------------------------------------------
|
| 843 |
|
| 844 |
+
def reset_portfolio(initial_cash: float = DEFAULT_INITIAL_CASH, market_id: str = DEFAULT_MARKET_ID) -> None:
|
|
|
|
|
|
|
|
|
|
| 845 |
"""Reset all auto-trader state for a fresh start."""
|
| 846 |
state = {
|
| 847 |
+
"market_id": market_id,
|
| 848 |
"created_at": datetime.now(timezone.utc).isoformat(),
|
| 849 |
"initial_cash": initial_cash,
|
| 850 |
"broker_cash": initial_cash,
|
|
|
|
| 857 |
}
|
| 858 |
_save_state(state, market_id=market_id)
|
| 859 |
|
| 860 |
+
# Clear journal files for the correct market
|
| 861 |
+
from trading.trade_journal import _journal_dir_for
|
| 862 |
+
journal_dir = _journal_dir_for(market_id)
|
| 863 |
for f in journal_dir.glob("*.jsonl"):
|
| 864 |
try:
|
| 865 |
f.unlink()
|
|
|
|
| 868 |
# appear/disappear; ignore missing file errors during cleanup.
|
| 869 |
continue
|
| 870 |
|
| 871 |
+
logger.info("Portfolio reset for %s: %.0f initial cash", market_id, initial_cash)
|
| 872 |
|
| 873 |
|
| 874 |
# ---------------------------------------------------------------------------
|
|
|
|
| 888 |
p.add_argument("--reset", action="store_true", help="Reset portfolio to fresh start")
|
| 889 |
p.add_argument("--initial-cash", type=float, default=DEFAULT_INITIAL_CASH)
|
| 890 |
p.add_argument("--dry-run", action="store_true", help="Generate signals but don't trade")
|
| 891 |
+
p.add_argument("--market", choices=["bist", "us"], default=DEFAULT_MARKET_ID)
|
| 892 |
|
| 893 |
args = p.parse_args()
|
| 894 |
|
| 895 |
if args.status:
|
| 896 |
+
status = get_auto_trader_status(market_id=args.market)
|
| 897 |
print(json.dumps(status, indent=2, default=str))
|
| 898 |
|
| 899 |
# Also show performance
|
|
|
|
| 906 |
|
| 907 |
if args.reset:
|
| 908 |
reset_portfolio(args.initial_cash, market_id=args.market)
|
| 909 |
+
print(f"Portfolio reset for {args.market} with {args.initial_cash:,.0f}")
|
| 910 |
return 0
|
| 911 |
|
| 912 |
if args.continuous:
|
|
|
|
| 923 |
return 0
|
| 924 |
|
| 925 |
# Default: show status
|
| 926 |
+
status = get_auto_trader_status(market_id=args.market)
|
| 927 |
print(json.dumps(status, indent=2, default=str))
|
| 928 |
return 0
|
| 929 |
|
huggingface-space/trading/broker_base.py
CHANGED
|
@@ -98,7 +98,7 @@ class FillResult:
|
|
| 98 |
# ---------------------------------------------------------------------------
|
| 99 |
|
| 100 |
class SlippageModel:
|
| 101 |
-
"""Realistic slippage and commission estimation
|
| 102 |
|
| 103 |
Parameters
|
| 104 |
----------
|
|
@@ -106,6 +106,7 @@ class SlippageModel:
|
|
| 106 |
Round-trip commission as fraction (e.g. 0.001 = 0.1% = 10 bps).
|
| 107 |
bsmv_rate : float
|
| 108 |
"Banka ve Sigorta Muameleleri Vergisi" (BSMV) applied on commission.
|
|
|
|
| 109 |
min_slippage_bps : float
|
| 110 |
Minimum market-impact slippage in basis points.
|
| 111 |
vol_slippage_coeff : float
|
|
@@ -161,16 +162,26 @@ class SlippageModel:
|
|
| 161 |
return round(mid_price * (1 - slip_frac), 4)
|
| 162 |
|
| 163 |
|
| 164 |
-
def
|
| 165 |
-
"""
|
| 166 |
|
| 167 |
-
BIST: 10 bps commission + 5% BSMV
|
| 168 |
-
US
|
| 169 |
"""
|
| 170 |
-
if
|
| 171 |
-
return SlippageModel(
|
| 172 |
-
|
| 173 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 174 |
|
| 175 |
|
| 176 |
# ---------------------------------------------------------------------------
|
|
|
|
| 98 |
# ---------------------------------------------------------------------------
|
| 99 |
|
| 100 |
class SlippageModel:
|
| 101 |
+
"""Realistic slippage and commission estimation.
|
| 102 |
|
| 103 |
Parameters
|
| 104 |
----------
|
|
|
|
| 106 |
Round-trip commission as fraction (e.g. 0.001 = 0.1% = 10 bps).
|
| 107 |
bsmv_rate : float
|
| 108 |
"Banka ve Sigorta Muameleleri Vergisi" (BSMV) applied on commission.
|
| 109 |
+
Only applies to BIST (Turkey). Set to 0.0 for other markets.
|
| 110 |
min_slippage_bps : float
|
| 111 |
Minimum market-impact slippage in basis points.
|
| 112 |
vol_slippage_coeff : float
|
|
|
|
| 162 |
return round(mid_price * (1 - slip_frac), 4)
|
| 163 |
|
| 164 |
|
| 165 |
+
def make_slippage_model(market_id: str = "bist") -> SlippageModel:
|
| 166 |
+
"""Create a market-appropriate SlippageModel.
|
| 167 |
|
| 168 |
+
BIST: 10 bps commission + 5% BSMV tax on commission.
|
| 169 |
+
US: 3 bps commission, no BSMV (US has no such tax).
|
| 170 |
"""
|
| 171 |
+
if market_id == "us":
|
| 172 |
+
return SlippageModel(
|
| 173 |
+
commission_rate=0.0003, # ~3 bps (typical US retail)
|
| 174 |
+
bsmv_rate=0.0, # No BSMV in US
|
| 175 |
+
min_slippage_bps=2.0, # US large-caps are more liquid
|
| 176 |
+
vol_slippage_coeff=0.2,
|
| 177 |
+
)
|
| 178 |
+
# Default: BIST
|
| 179 |
+
return SlippageModel(
|
| 180 |
+
commission_rate=0.001,
|
| 181 |
+
bsmv_rate=0.05,
|
| 182 |
+
min_slippage_bps=5.0,
|
| 183 |
+
vol_slippage_coeff=0.3,
|
| 184 |
+
)
|
| 185 |
|
| 186 |
|
| 187 |
# ---------------------------------------------------------------------------
|
huggingface-space/trading/circuit_breaker.py
CHANGED
|
@@ -188,6 +188,7 @@ class CircuitBreaker:
|
|
| 188 |
|
| 189 |
if old != new_state:
|
| 190 |
logger.warning("Circuit breaker: %s → %s", old.value, new_state.value)
|
|
|
|
| 191 |
|
| 192 |
def get_status(self) -> Dict[str, Any]:
|
| 193 |
"""Current status for dashboard."""
|
|
|
|
| 188 |
|
| 189 |
if old != new_state:
|
| 190 |
logger.warning("Circuit breaker: %s → %s", old.value, new_state.value)
|
| 191 |
+
self._save_state()
|
| 192 |
|
| 193 |
def get_status(self) -> Dict[str, Any]:
|
| 194 |
"""Current status for dashboard."""
|
huggingface-space/trading/daily_signals.py
CHANGED
|
@@ -51,6 +51,7 @@ from trading.market_registry import (
|
|
| 51 |
normalize_symbol,
|
| 52 |
to_provider_symbol,
|
| 53 |
)
|
|
|
|
| 54 |
|
| 55 |
warnings.filterwarnings("ignore")
|
| 56 |
|
|
@@ -113,10 +114,15 @@ def generate_signals(
|
|
| 113 |
|
| 114 |
def _process_one(sym: str) -> Optional[Dict[str, Any]]:
|
| 115 |
"""V3 Classification Ensemble — matches walk-forward backtest approach."""
|
|
|
|
|
|
|
| 116 |
try:
|
| 117 |
-
|
| 118 |
-
|
| 119 |
-
|
|
|
|
|
|
|
|
|
|
| 120 |
if df is None or df.empty:
|
| 121 |
logger.info("%s: no data returned from API — skipped", symbol)
|
| 122 |
return None
|
|
@@ -312,15 +318,15 @@ def generate_signals(
|
|
| 312 |
"top_features": json.dumps(top_features),
|
| 313 |
}
|
| 314 |
except Exception as e:
|
| 315 |
-
logger.warning("Signal generation FAILED for %s: %s",
|
| 316 |
-
logger.debug("Traceback for %s:\n%s",
|
| 317 |
# Return a minimal error-HOLD so the symbol still shows up in
|
| 318 |
# signals_log and we can diagnose which symbols keep crashing.
|
| 319 |
return {
|
| 320 |
"market_id": market.market_id,
|
| 321 |
"currency": market.currency,
|
| 322 |
-
"provider_symbol":
|
| 323 |
-
"symbol":
|
| 324 |
"date": today,
|
| 325 |
"data_age_days": -1,
|
| 326 |
"data_stale": True,
|
|
@@ -345,11 +351,17 @@ def generate_signals(
|
|
| 345 |
|
| 346 |
results: List[Dict[str, Any]] = []
|
| 347 |
failed_symbols: List[str] = []
|
|
|
|
| 348 |
with ThreadPoolExecutor(max_workers=4) as ex:
|
| 349 |
futs = {ex.submit(_process_one, s): s for s in symbols}
|
| 350 |
-
for i, f in enumerate(as_completed(futs)):
|
| 351 |
sym_name = futs[f]
|
| 352 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 353 |
if r:
|
| 354 |
results.append(r)
|
| 355 |
if r.get("ml_signal") == "ERROR":
|
|
@@ -374,6 +386,8 @@ def generate_signals(
|
|
| 374 |
error_count = int((df["ml_signal"] == "ERROR").sum()) if "ml_signal" in df.columns else 0
|
| 375 |
summary = {
|
| 376 |
"date": today,
|
|
|
|
|
|
|
| 377 |
"symbols_processed": len(results),
|
| 378 |
"symbols_total": len(symbols),
|
| 379 |
"symbols_failed": len(failed_symbols),
|
|
@@ -384,7 +398,7 @@ def generate_signals(
|
|
| 384 |
"actionable_signals": int(df["actionable"].sum()),
|
| 385 |
"avg_confidence": round(float(df["confidence"].mean()), 3),
|
| 386 |
"top_picks": df[df["actionable"] & (df["final_signal"] == "BUY")].head(top_n)[
|
| 387 |
-
["symbol", "predicted_return_pct", "confidence", "final_signal"]
|
| 388 |
].to_dict("records"),
|
| 389 |
}
|
| 390 |
|
|
@@ -393,23 +407,26 @@ def generate_signals(
|
|
| 393 |
|
| 394 |
def main() -> int:
|
| 395 |
p = argparse.ArgumentParser(description="Daily signal generator")
|
| 396 |
-
p.add_argument(
|
|
|
|
|
|
|
|
|
|
|
|
|
| 397 |
p.add_argument("--symbols", nargs="*")
|
| 398 |
p.add_argument("--days-ahead", type=int, default=7)
|
| 399 |
p.add_argument("--train-window", type=int, default=504)
|
| 400 |
p.add_argument("--top", type=int, default=10)
|
| 401 |
-
p.add_argument("--min-confidence", type=float, default=
|
| 402 |
-
p.add_argument("--out-dir")
|
| 403 |
p.add_argument("--market", default=DEFAULT_MARKET_ID, choices=["bist", "us"])
|
|
|
|
| 404 |
args = p.parse_args()
|
| 405 |
|
| 406 |
if args.universe == "custom" and args.symbols:
|
| 407 |
symbols = [s.strip().upper() for s in args.symbols]
|
| 408 |
else:
|
| 409 |
-
|
| 410 |
-
result = get_index_constituents(args.universe)
|
| 411 |
symbols = result.symbols
|
| 412 |
-
print(f"Universe {args.universe}: {len(symbols)} symbols")
|
| 413 |
|
| 414 |
print(f"Generating signals for {len(symbols)} symbols...")
|
| 415 |
df, summary = generate_signals(
|
|
|
|
| 51 |
normalize_symbol,
|
| 52 |
to_provider_symbol,
|
| 53 |
)
|
| 54 |
+
from trading.universe_provider import get_market_universe
|
| 55 |
|
| 56 |
warnings.filterwarnings("ignore")
|
| 57 |
|
|
|
|
| 114 |
|
| 115 |
def _process_one(sym: str) -> Optional[Dict[str, Any]]:
|
| 116 |
"""V3 Classification Ensemble — matches walk-forward backtest approach."""
|
| 117 |
+
symbol = normalize_symbol(sym, market_id=market.market_id)
|
| 118 |
+
provider_symbol = to_provider_symbol(symbol, market_id=market.market_id)
|
| 119 |
try:
|
| 120 |
+
df = get_stock_data_for_api(
|
| 121 |
+
symbol,
|
| 122 |
+
period="5y",
|
| 123 |
+
interval="1d",
|
| 124 |
+
market_id=market.market_id,
|
| 125 |
+
)
|
| 126 |
if df is None or df.empty:
|
| 127 |
logger.info("%s: no data returned from API — skipped", symbol)
|
| 128 |
return None
|
|
|
|
| 318 |
"top_features": json.dumps(top_features),
|
| 319 |
}
|
| 320 |
except Exception as e:
|
| 321 |
+
logger.warning("Signal generation FAILED for %s: %s", symbol, e)
|
| 322 |
+
logger.debug("Traceback for %s:\n%s", symbol, traceback.format_exc())
|
| 323 |
# Return a minimal error-HOLD so the symbol still shows up in
|
| 324 |
# signals_log and we can diagnose which symbols keep crashing.
|
| 325 |
return {
|
| 326 |
"market_id": market.market_id,
|
| 327 |
"currency": market.currency,
|
| 328 |
+
"provider_symbol": provider_symbol,
|
| 329 |
+
"symbol": symbol,
|
| 330 |
"date": today,
|
| 331 |
"data_age_days": -1,
|
| 332 |
"data_stale": True,
|
|
|
|
| 351 |
|
| 352 |
results: List[Dict[str, Any]] = []
|
| 353 |
failed_symbols: List[str] = []
|
| 354 |
+
_SIGNAL_TIMEOUT_SEC = 120 # max seconds per stock before giving up
|
| 355 |
with ThreadPoolExecutor(max_workers=4) as ex:
|
| 356 |
futs = {ex.submit(_process_one, s): s for s in symbols}
|
| 357 |
+
for i, f in enumerate(as_completed(futs, timeout=_SIGNAL_TIMEOUT_SEC * len(symbols))):
|
| 358 |
sym_name = futs[f]
|
| 359 |
+
try:
|
| 360 |
+
r = f.result(timeout=_SIGNAL_TIMEOUT_SEC)
|
| 361 |
+
except Exception as exc:
|
| 362 |
+
logger.warning("Signal generation TIMEOUT/ERROR for %s: %s", sym_name, exc)
|
| 363 |
+
failed_symbols.append(sym_name)
|
| 364 |
+
continue
|
| 365 |
if r:
|
| 366 |
results.append(r)
|
| 367 |
if r.get("ml_signal") == "ERROR":
|
|
|
|
| 386 |
error_count = int((df["ml_signal"] == "ERROR").sum()) if "ml_signal" in df.columns else 0
|
| 387 |
summary = {
|
| 388 |
"date": today,
|
| 389 |
+
"market_id": market.market_id,
|
| 390 |
+
"currency": market.currency,
|
| 391 |
"symbols_processed": len(results),
|
| 392 |
"symbols_total": len(symbols),
|
| 393 |
"symbols_failed": len(failed_symbols),
|
|
|
|
| 398 |
"actionable_signals": int(df["actionable"].sum()),
|
| 399 |
"avg_confidence": round(float(df["confidence"].mean()), 3),
|
| 400 |
"top_picks": df[df["actionable"] & (df["final_signal"] == "BUY")].head(top_n)[
|
| 401 |
+
["market_id", "symbol", "predicted_return_pct", "confidence", "final_signal"]
|
| 402 |
].to_dict("records"),
|
| 403 |
}
|
| 404 |
|
|
|
|
| 407 |
|
| 408 |
def main() -> int:
|
| 409 |
p = argparse.ArgumentParser(description="Daily signal generator")
|
| 410 |
+
p.add_argument(
|
| 411 |
+
"--universe",
|
| 412 |
+
default="bist30",
|
| 413 |
+
choices=["bist30", "bist50", "bist100", "sp100", "sp500", "nasdaq100", "custom"],
|
| 414 |
+
)
|
| 415 |
p.add_argument("--symbols", nargs="*")
|
| 416 |
p.add_argument("--days-ahead", type=int, default=7)
|
| 417 |
p.add_argument("--train-window", type=int, default=504)
|
| 418 |
p.add_argument("--top", type=int, default=10)
|
| 419 |
+
p.add_argument("--min-confidence", type=float, default=15.0)
|
|
|
|
| 420 |
p.add_argument("--market", default=DEFAULT_MARKET_ID, choices=["bist", "us"])
|
| 421 |
+
p.add_argument("--out-dir", default="")
|
| 422 |
args = p.parse_args()
|
| 423 |
|
| 424 |
if args.universe == "custom" and args.symbols:
|
| 425 |
symbols = [s.strip().upper() for s in args.symbols]
|
| 426 |
else:
|
| 427 |
+
result = get_market_universe(args.market, args.universe)
|
|
|
|
| 428 |
symbols = result.symbols
|
| 429 |
+
print(f"Universe {args.market}/{args.universe}: {len(symbols)} symbols")
|
| 430 |
|
| 431 |
print(f"Generating signals for {len(symbols)} symbols...")
|
| 432 |
df, summary = generate_signals(
|
huggingface-space/trading/db_store.py
CHANGED
|
@@ -137,9 +137,50 @@ CREATE TABLE IF NOT EXISTS schema_version (
|
|
| 137 |
version INTEGER PRIMARY KEY,
|
| 138 |
applied_at TEXT NOT NULL DEFAULT (datetime('now'))
|
| 139 |
);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 140 |
"""
|
| 141 |
|
| 142 |
-
_CURRENT_SCHEMA_VERSION =
|
| 143 |
|
| 144 |
|
| 145 |
# ---------------------------------------------------------------------------
|
|
@@ -195,12 +236,63 @@ class TradingStore:
|
|
| 195 |
current = row[0] if row and row[0] else 0
|
| 196 |
|
| 197 |
if current < _CURRENT_SCHEMA_VERSION:
|
|
|
|
| 198 |
conn.execute(
|
| 199 |
"INSERT OR REPLACE INTO schema_version (version) VALUES (?)",
|
| 200 |
(_CURRENT_SCHEMA_VERSION,),
|
| 201 |
)
|
| 202 |
conn.commit()
|
| 203 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 204 |
# -----------------------------------------------------------------------
|
| 205 |
# State (key-value)
|
| 206 |
# -----------------------------------------------------------------------
|
|
@@ -623,6 +715,117 @@ class TradingStore:
|
|
| 623 |
logger.info("JSONL→SQLite migration: %s", counts)
|
| 624 |
return counts
|
| 625 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 626 |
def close(self) -> None:
|
| 627 |
"""Close thread-local connection."""
|
| 628 |
if hasattr(self._local, "conn") and self._local.conn:
|
|
|
|
| 137 |
version INTEGER PRIMARY KEY,
|
| 138 |
applied_at TEXT NOT NULL DEFAULT (datetime('now'))
|
| 139 |
);
|
| 140 |
+
|
| 141 |
+
-- Daily run log — tracks every worker invocation to detect gaps
|
| 142 |
+
CREATE TABLE IF NOT EXISTS daily_run_log (
|
| 143 |
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
| 144 |
+
date TEXT NOT NULL,
|
| 145 |
+
market_id TEXT NOT NULL DEFAULT 'bist',
|
| 146 |
+
status TEXT NOT NULL,
|
| 147 |
+
trades_executed INTEGER NOT NULL DEFAULT 0,
|
| 148 |
+
elapsed_sec REAL DEFAULT 0,
|
| 149 |
+
equity REAL DEFAULT 0,
|
| 150 |
+
cash REAL DEFAULT 0,
|
| 151 |
+
model_safe INTEGER DEFAULT 0,
|
| 152 |
+
cycle_detail TEXT DEFAULT '{}',
|
| 153 |
+
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
| 154 |
+
);
|
| 155 |
+
|
| 156 |
+
CREATE INDEX IF NOT EXISTS idx_run_log_date ON daily_run_log(date);
|
| 157 |
+
CREATE INDEX IF NOT EXISTS idx_run_log_market ON daily_run_log(market_id);
|
| 158 |
+
|
| 159 |
+
-- Scorecard history — periodic readiness assessment snapshots
|
| 160 |
+
CREATE TABLE IF NOT EXISTS scorecard_history (
|
| 161 |
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
| 162 |
+
date TEXT NOT NULL,
|
| 163 |
+
market_id TEXT NOT NULL DEFAULT 'bist',
|
| 164 |
+
verdict TEXT NOT NULL DEFAULT 'NO_GO',
|
| 165 |
+
checks_passed INTEGER NOT NULL DEFAULT 0,
|
| 166 |
+
checks_total INTEGER NOT NULL DEFAULT 0,
|
| 167 |
+
closed_trades INTEGER NOT NULL DEFAULT 0,
|
| 168 |
+
win_rate_pct REAL DEFAULT 0,
|
| 169 |
+
profit_factor REAL DEFAULT 0,
|
| 170 |
+
max_drawdown_pct REAL DEFAULT 0,
|
| 171 |
+
total_return_pct REAL DEFAULT 0,
|
| 172 |
+
consecutive_run_days INTEGER DEFAULT 0,
|
| 173 |
+
predictions_completed INTEGER DEFAULT 0,
|
| 174 |
+
predictions_needed INTEGER DEFAULT 10,
|
| 175 |
+
bootstrap_unlocked INTEGER DEFAULT 0,
|
| 176 |
+
detail_json TEXT DEFAULT '{}',
|
| 177 |
+
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
| 178 |
+
);
|
| 179 |
+
|
| 180 |
+
CREATE INDEX IF NOT EXISTS idx_scorecard_date ON scorecard_history(date);
|
| 181 |
"""
|
| 182 |
|
| 183 |
+
_CURRENT_SCHEMA_VERSION = 2
|
| 184 |
|
| 185 |
|
| 186 |
# ---------------------------------------------------------------------------
|
|
|
|
| 236 |
current = row[0] if row and row[0] else 0
|
| 237 |
|
| 238 |
if current < _CURRENT_SCHEMA_VERSION:
|
| 239 |
+
self._run_migrations(current)
|
| 240 |
conn.execute(
|
| 241 |
"INSERT OR REPLACE INTO schema_version (version) VALUES (?)",
|
| 242 |
(_CURRENT_SCHEMA_VERSION,),
|
| 243 |
)
|
| 244 |
conn.commit()
|
| 245 |
|
| 246 |
+
def _run_migrations(self, from_version: int) -> None:
|
| 247 |
+
"""Run incremental schema migrations."""
|
| 248 |
+
conn = self._get_conn()
|
| 249 |
+
if from_version < 2:
|
| 250 |
+
# v2: add daily_run_log and scorecard_history (idempotent via IF NOT EXISTS in _SCHEMA_SQL)
|
| 251 |
+
tables = [r[0] for r in conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()]
|
| 252 |
+
if "daily_run_log" not in tables:
|
| 253 |
+
conn.executescript("""
|
| 254 |
+
CREATE TABLE IF NOT EXISTS daily_run_log (
|
| 255 |
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
| 256 |
+
date TEXT NOT NULL,
|
| 257 |
+
market_id TEXT NOT NULL DEFAULT 'bist',
|
| 258 |
+
status TEXT NOT NULL,
|
| 259 |
+
trades_executed INTEGER NOT NULL DEFAULT 0,
|
| 260 |
+
elapsed_sec REAL DEFAULT 0,
|
| 261 |
+
equity REAL DEFAULT 0,
|
| 262 |
+
cash REAL DEFAULT 0,
|
| 263 |
+
model_safe INTEGER DEFAULT 0,
|
| 264 |
+
cycle_detail TEXT DEFAULT '{}',
|
| 265 |
+
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
| 266 |
+
);
|
| 267 |
+
CREATE INDEX IF NOT EXISTS idx_run_log_date ON daily_run_log(date);
|
| 268 |
+
CREATE INDEX IF NOT EXISTS idx_run_log_market ON daily_run_log(market_id);
|
| 269 |
+
""")
|
| 270 |
+
if "scorecard_history" not in tables:
|
| 271 |
+
conn.executescript("""
|
| 272 |
+
CREATE TABLE IF NOT EXISTS scorecard_history (
|
| 273 |
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
| 274 |
+
date TEXT NOT NULL,
|
| 275 |
+
market_id TEXT NOT NULL DEFAULT 'bist',
|
| 276 |
+
verdict TEXT NOT NULL DEFAULT 'NO_GO',
|
| 277 |
+
checks_passed INTEGER NOT NULL DEFAULT 0,
|
| 278 |
+
checks_total INTEGER NOT NULL DEFAULT 0,
|
| 279 |
+
closed_trades INTEGER NOT NULL DEFAULT 0,
|
| 280 |
+
win_rate_pct REAL DEFAULT 0,
|
| 281 |
+
profit_factor REAL DEFAULT 0,
|
| 282 |
+
max_drawdown_pct REAL DEFAULT 0,
|
| 283 |
+
total_return_pct REAL DEFAULT 0,
|
| 284 |
+
consecutive_run_days INTEGER DEFAULT 0,
|
| 285 |
+
predictions_completed INTEGER DEFAULT 0,
|
| 286 |
+
predictions_needed INTEGER DEFAULT 10,
|
| 287 |
+
bootstrap_unlocked INTEGER DEFAULT 0,
|
| 288 |
+
detail_json TEXT DEFAULT '{}',
|
| 289 |
+
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
| 290 |
+
);
|
| 291 |
+
CREATE INDEX IF NOT EXISTS idx_scorecard_date ON scorecard_history(date);
|
| 292 |
+
""")
|
| 293 |
+
conn.commit()
|
| 294 |
+
logger.info("Schema migrated to v2 (daily_run_log + scorecard_history)")
|
| 295 |
+
|
| 296 |
# -----------------------------------------------------------------------
|
| 297 |
# State (key-value)
|
| 298 |
# -----------------------------------------------------------------------
|
|
|
|
| 715 |
logger.info("JSONL→SQLite migration: %s", counts)
|
| 716 |
return counts
|
| 717 |
|
| 718 |
+
# -----------------------------------------------------------------------
|
| 719 |
+
# Daily run log
|
| 720 |
+
# -----------------------------------------------------------------------
|
| 721 |
+
|
| 722 |
+
def record_run(
|
| 723 |
+
self,
|
| 724 |
+
date: str,
|
| 725 |
+
market_id: str,
|
| 726 |
+
status: str,
|
| 727 |
+
trades_executed: int = 0,
|
| 728 |
+
elapsed_sec: float = 0.0,
|
| 729 |
+
equity: float = 0.0,
|
| 730 |
+
cash: float = 0.0,
|
| 731 |
+
model_safe: bool = False,
|
| 732 |
+
cycle_detail: Optional[Dict] = None,
|
| 733 |
+
) -> None:
|
| 734 |
+
with self._transaction() as cur:
|
| 735 |
+
cur.execute("""
|
| 736 |
+
INSERT INTO daily_run_log
|
| 737 |
+
(date, market_id, status, trades_executed, elapsed_sec,
|
| 738 |
+
equity, cash, model_safe, cycle_detail)
|
| 739 |
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
|
| 740 |
+
""", (date, market_id, status, trades_executed, round(elapsed_sec, 1),
|
| 741 |
+
round(equity, 2), round(cash, 2), 1 if model_safe else 0,
|
| 742 |
+
json.dumps(cycle_detail or {}, default=str)))
|
| 743 |
+
|
| 744 |
+
def get_run_log(self, market_id: str = "bist", limit: int = 200) -> List[Dict[str, Any]]:
|
| 745 |
+
conn = self._get_conn()
|
| 746 |
+
rows = conn.execute(
|
| 747 |
+
"SELECT * FROM daily_run_log WHERE market_id = ? ORDER BY date DESC LIMIT ?",
|
| 748 |
+
(market_id, limit),
|
| 749 |
+
).fetchall()
|
| 750 |
+
return [dict(r) for r in rows]
|
| 751 |
+
|
| 752 |
+
def get_consecutive_run_days(self, market_id: str = "bist") -> int:
|
| 753 |
+
"""Count consecutive trading days with at least one run, going back from today."""
|
| 754 |
+
conn = self._get_conn()
|
| 755 |
+
rows = conn.execute(
|
| 756 |
+
"SELECT DISTINCT date FROM daily_run_log WHERE market_id = ? ORDER BY date DESC",
|
| 757 |
+
(market_id,),
|
| 758 |
+
).fetchall()
|
| 759 |
+
if not rows:
|
| 760 |
+
return 0
|
| 761 |
+
dates = [r[0] for r in rows]
|
| 762 |
+
from datetime import datetime as _dt, timedelta
|
| 763 |
+
streak = 0
|
| 764 |
+
# Start from the most recent run date (today may not have run yet)
|
| 765 |
+
expected = _dt.strptime(dates[0], "%Y-%m-%d").date()
|
| 766 |
+
today = _dt.now().date()
|
| 767 |
+
# Only accept if the most recent run is today or the previous trading day
|
| 768 |
+
days_since_last = (today - expected).days
|
| 769 |
+
if days_since_last > 3: # More than a long weekend gap
|
| 770 |
+
return 0
|
| 771 |
+
for d in dates:
|
| 772 |
+
run_date = _dt.strptime(d, "%Y-%m-%d").date()
|
| 773 |
+
# Allow weekend gaps (Sat/Sun are not trading days)
|
| 774 |
+
while expected.weekday() in (5, 6): # skip weekends
|
| 775 |
+
expected -= timedelta(days=1)
|
| 776 |
+
if run_date == expected:
|
| 777 |
+
streak += 1
|
| 778 |
+
expected -= timedelta(days=1)
|
| 779 |
+
elif run_date < expected:
|
| 780 |
+
break # gap found
|
| 781 |
+
return streak
|
| 782 |
+
|
| 783 |
+
# -----------------------------------------------------------------------
|
| 784 |
+
# Scorecard history
|
| 785 |
+
# -----------------------------------------------------------------------
|
| 786 |
+
|
| 787 |
+
def record_scorecard(
|
| 788 |
+
self,
|
| 789 |
+
date: str,
|
| 790 |
+
market_id: str,
|
| 791 |
+
verdict: str,
|
| 792 |
+
checks_passed: int,
|
| 793 |
+
checks_total: int,
|
| 794 |
+
closed_trades: int = 0,
|
| 795 |
+
win_rate_pct: float = 0.0,
|
| 796 |
+
profit_factor: float = 0.0,
|
| 797 |
+
max_drawdown_pct: float = 0.0,
|
| 798 |
+
total_return_pct: float = 0.0,
|
| 799 |
+
consecutive_run_days: int = 0,
|
| 800 |
+
predictions_completed: int = 0,
|
| 801 |
+
predictions_needed: int = 10,
|
| 802 |
+
bootstrap_unlocked: bool = False,
|
| 803 |
+
detail_json: Optional[Dict] = None,
|
| 804 |
+
) -> None:
|
| 805 |
+
with self._transaction() as cur:
|
| 806 |
+
cur.execute("""
|
| 807 |
+
INSERT INTO scorecard_history
|
| 808 |
+
(date, market_id, verdict, checks_passed, checks_total,
|
| 809 |
+
closed_trades, win_rate_pct, profit_factor, max_drawdown_pct,
|
| 810 |
+
total_return_pct, consecutive_run_days,
|
| 811 |
+
predictions_completed, predictions_needed, bootstrap_unlocked,
|
| 812 |
+
detail_json)
|
| 813 |
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
| 814 |
+
""", (date, market_id, verdict, checks_passed, checks_total,
|
| 815 |
+
closed_trades, round(win_rate_pct, 2), round(profit_factor, 2),
|
| 816 |
+
round(max_drawdown_pct, 2), round(total_return_pct, 2),
|
| 817 |
+
consecutive_run_days, predictions_completed, predictions_needed,
|
| 818 |
+
1 if bootstrap_unlocked else 0,
|
| 819 |
+
json.dumps(detail_json or {}, default=str)))
|
| 820 |
+
|
| 821 |
+
def get_scorecard_history(self, market_id: str = "bist", limit: int = 30) -> List[Dict[str, Any]]:
|
| 822 |
+
conn = self._get_conn()
|
| 823 |
+
rows = conn.execute(
|
| 824 |
+
"SELECT * FROM scorecard_history WHERE market_id = ? ORDER BY date DESC LIMIT ?",
|
| 825 |
+
(market_id, limit),
|
| 826 |
+
).fetchall()
|
| 827 |
+
return [dict(r) for r in rows]
|
| 828 |
+
|
| 829 |
def close(self) -> None:
|
| 830 |
"""Close thread-local connection."""
|
| 831 |
if hasattr(self._local, "conn") and self._local.conn:
|
huggingface-space/trading/market_registry.py
CHANGED
|
@@ -25,6 +25,11 @@ class MarketConfig:
|
|
| 25 |
run_end_minute: int = 0
|
| 26 |
scan_start_hour: int = 18 # hour in market's local timezone to run daily scan
|
| 27 |
scan_start_minute: int = 0
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 28 |
|
| 29 |
|
| 30 |
_MARKETS = {
|
|
@@ -42,6 +47,9 @@ _MARKETS = {
|
|
| 42 |
run_end_minute=0,
|
| 43 |
scan_start_hour=19, # 19:00 TR (1hr after 18:00 close)
|
| 44 |
scan_start_minute=0,
|
|
|
|
|
|
|
|
|
|
| 45 |
),
|
| 46 |
"us": MarketConfig(
|
| 47 |
market_id="us",
|
|
@@ -57,6 +65,9 @@ _MARKETS = {
|
|
| 57 |
run_end_minute=0,
|
| 58 |
scan_start_hour=17, # 17:00 ET (1hr after 16:00 close)
|
| 59 |
scan_start_minute=0,
|
|
|
|
|
|
|
|
|
|
| 60 |
),
|
| 61 |
}
|
| 62 |
|
|
@@ -114,6 +125,13 @@ def get_trading_db_path(market_id: str = DEFAULT_MARKET_ID) -> Path:
|
|
| 114 |
return get_market_storage_dir(market_key) / "trading.db"
|
| 115 |
|
| 116 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 117 |
def get_signal_output_dir(market_id: str = DEFAULT_MARKET_ID) -> Path:
|
| 118 |
market_key = get_market_config(market_id).market_id
|
| 119 |
if market_key == DEFAULT_MARKET_ID:
|
|
|
|
| 25 |
run_end_minute: int = 0
|
| 26 |
scan_start_hour: int = 18 # hour in market's local timezone to run daily scan
|
| 27 |
scan_start_minute: int = 0
|
| 28 |
+
# Market hours in UTC — used by RiskGate to block off-hours orders
|
| 29 |
+
market_open_hour_utc: int = 7
|
| 30 |
+
market_close_hour_utc: int = 15
|
| 31 |
+
# Whether this market is approved for live/paper trading
|
| 32 |
+
trading_enabled: bool = True
|
| 33 |
|
| 34 |
|
| 35 |
_MARKETS = {
|
|
|
|
| 47 |
run_end_minute=0,
|
| 48 |
scan_start_hour=19, # 19:00 TR (1hr after 18:00 close)
|
| 49 |
scan_start_minute=0,
|
| 50 |
+
market_open_hour_utc=7, # BIST 10:00 Istanbul = 07:00 UTC
|
| 51 |
+
market_close_hour_utc=15, # BIST 18:00 Istanbul = 15:00 UTC
|
| 52 |
+
trading_enabled=True,
|
| 53 |
),
|
| 54 |
"us": MarketConfig(
|
| 55 |
market_id="us",
|
|
|
|
| 65 |
run_end_minute=0,
|
| 66 |
scan_start_hour=17, # 17:00 ET (1hr after 16:00 close)
|
| 67 |
scan_start_minute=0,
|
| 68 |
+
market_open_hour_utc=13, # NYSE 09:00 ET ≈ 13:00 UTC (DST: 14:00)
|
| 69 |
+
market_close_hour_utc=21, # NYSE 16:00 ET ≈ 21:00 UTC (DST: 20:00)
|
| 70 |
+
trading_enabled=True, # ENABLED for paper trading
|
| 71 |
),
|
| 72 |
}
|
| 73 |
|
|
|
|
| 125 |
return get_market_storage_dir(market_key) / "trading.db"
|
| 126 |
|
| 127 |
|
| 128 |
+
def get_eligibility_path(market_id: str = DEFAULT_MARKET_ID) -> Path:
|
| 129 |
+
market_key = get_market_config(market_id).market_id
|
| 130 |
+
if market_key == DEFAULT_MARKET_ID:
|
| 131 |
+
return _PAPER_TRADING_DIR / "stock_eligibility.json"
|
| 132 |
+
return get_market_storage_dir(market_key) / "eligibility.json"
|
| 133 |
+
|
| 134 |
+
|
| 135 |
def get_signal_output_dir(market_id: str = DEFAULT_MARKET_ID) -> Path:
|
| 136 |
market_key = get_market_config(market_id).market_id
|
| 137 |
if market_key == DEFAULT_MARKET_ID:
|
huggingface-space/trading/midas_broker.py
CHANGED
|
@@ -766,7 +766,7 @@ class MidasBroker(BrokerBase):
|
|
| 766 |
"pnl_pct": round((equity / max(1, initial) - 1) * 100, 2),
|
| 767 |
"positions": positions,
|
| 768 |
"trade_count": len(self._tracker.get_all_orders()),
|
| 769 |
-
"total_commission":
|
| 770 |
}
|
| 771 |
except Exception as e:
|
| 772 |
logger.error("Failed to get Midas account info: %s", e)
|
|
|
|
| 766 |
"pnl_pct": round((equity / max(1, initial) - 1) * 100, 2),
|
| 767 |
"positions": positions,
|
| 768 |
"trade_count": len(self._tracker.get_all_orders()),
|
| 769 |
+
"total_commission": round(sum(o.get("commission", 0) for o in self._tracker.get_all_orders()), 2),
|
| 770 |
}
|
| 771 |
except Exception as e:
|
| 772 |
logger.error("Failed to get Midas account info: %s", e)
|
huggingface-space/trading/model_risk.py
CHANGED
|
@@ -27,9 +27,16 @@ import numpy as np
|
|
| 27 |
|
| 28 |
logger = logging.getLogger("trading.model_risk")
|
| 29 |
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 33 |
|
| 34 |
|
| 35 |
# ---------------------------------------------------------------------------
|
|
@@ -54,6 +61,10 @@ class ModelRiskConfig:
|
|
| 54 |
# Auto-disable after N days without any valid evaluation
|
| 55 |
max_days_without_eval: int = 14
|
| 56 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 57 |
# Regime change detection
|
| 58 |
vol_regime_change_threshold: float = 2.0 # if vol doubles → warning
|
| 59 |
|
|
@@ -89,17 +100,26 @@ class ModelRiskManager:
|
|
| 89 |
4. is_model_safe() — called before executing any trade
|
| 90 |
"""
|
| 91 |
|
| 92 |
-
def __init__(
|
|
|
|
|
|
|
|
|
|
|
|
|
| 93 |
self.config = config or ModelRiskConfig()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 94 |
self._state = self._load_state()
|
| 95 |
|
| 96 |
# -- State persistence --
|
| 97 |
|
| 98 |
def _load_state(self) -> Dict[str, Any]:
|
| 99 |
-
|
| 100 |
-
if
|
| 101 |
try:
|
| 102 |
-
return json.loads(
|
| 103 |
except Exception:
|
| 104 |
pass
|
| 105 |
return {
|
|
@@ -114,9 +134,9 @@ class ModelRiskManager:
|
|
| 114 |
}
|
| 115 |
|
| 116 |
def _save_state(self) -> None:
|
| 117 |
-
|
| 118 |
self._state["updated_at"] = datetime.now(timezone.utc).isoformat()
|
| 119 |
-
|
| 120 |
|
| 121 |
# -- Prediction logging --
|
| 122 |
|
|
@@ -127,7 +147,7 @@ class ModelRiskManager:
|
|
| 127 |
confidence: float,
|
| 128 |
days_ahead: int = 7,
|
| 129 |
) -> None:
|
| 130 |
-
|
| 131 |
record = {
|
| 132 |
"symbol": symbol,
|
| 133 |
"prediction_date": datetime.now(timezone.utc).strftime("%Y-%m-%d"),
|
|
@@ -137,7 +157,7 @@ class ModelRiskManager:
|
|
| 137 |
"actual_return_pct": None,
|
| 138 |
"outcome_date": None,
|
| 139 |
}
|
| 140 |
-
with open(
|
| 141 |
f.write(json.dumps(record) + "\n")
|
| 142 |
|
| 143 |
def log_outcome(
|
|
@@ -147,10 +167,10 @@ class ModelRiskManager:
|
|
| 147 |
actual_return_pct: float,
|
| 148 |
) -> None:
|
| 149 |
"""Update a prediction with its actual outcome."""
|
| 150 |
-
if not
|
| 151 |
return
|
| 152 |
|
| 153 |
-
lines =
|
| 154 |
updated = False
|
| 155 |
new_lines = []
|
| 156 |
|
|
@@ -175,7 +195,7 @@ class ModelRiskManager:
|
|
| 175 |
new_lines.append(line)
|
| 176 |
|
| 177 |
if updated:
|
| 178 |
-
|
| 179 |
|
| 180 |
# -- Evaluation --
|
| 181 |
|
|
@@ -197,8 +217,9 @@ class ModelRiskManager:
|
|
| 197 |
"needed": self.config.min_samples_for_eval,
|
| 198 |
"model_enabled": self._state["model_enabled"],
|
| 199 |
}
|
| 200 |
-
|
| 201 |
-
|
|
|
|
| 202 |
return result
|
| 203 |
|
| 204 |
# Compute metrics
|
|
@@ -298,11 +319,34 @@ class ModelRiskManager:
|
|
| 298 |
def is_model_safe(self) -> bool:
|
| 299 |
"""Check if the model is currently safe to use for trading signals.
|
| 300 |
|
| 301 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 302 |
"""
|
| 303 |
if not self._state["model_enabled"]:
|
| 304 |
return False
|
| 305 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 306 |
# Check stale evaluation
|
| 307 |
last_eval = self._state.get("last_evaluation")
|
| 308 |
if last_eval:
|
|
@@ -322,11 +366,45 @@ class ModelRiskManager:
|
|
| 322 |
|
| 323 |
return True
|
| 324 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 325 |
def get_status(self) -> Dict[str, Any]:
|
| 326 |
"""Return current model risk status (safe for logging/display)."""
|
| 327 |
return {
|
| 328 |
"model_enabled": self._state["model_enabled"],
|
| 329 |
"model_safe": self.is_model_safe(),
|
|
|
|
|
|
|
| 330 |
"disabled_reason": self._state.get("disabled_reason"),
|
| 331 |
"disabled_at": self._state.get("disabled_at"),
|
| 332 |
"last_evaluation": self._state.get("last_evaluation"),
|
|
@@ -355,10 +433,10 @@ class ModelRiskManager:
|
|
| 355 |
# -- Internal --
|
| 356 |
|
| 357 |
def _load_completed_predictions(self) -> List[Dict[str, Any]]:
|
| 358 |
-
if not
|
| 359 |
return []
|
| 360 |
records = []
|
| 361 |
-
for line in
|
| 362 |
try:
|
| 363 |
rec = json.loads(line)
|
| 364 |
if rec.get("actual_return_pct") is not None:
|
|
|
|
| 27 |
|
| 28 |
logger = logging.getLogger("trading.model_risk")
|
| 29 |
|
| 30 |
+
# Legacy global defaults — kept only for backward-compatible helpers.
|
| 31 |
+
# All new code should use the instance-level paths on ModelRiskManager.
|
| 32 |
+
_DEFAULT_STATE_DIR = Path("paper_trading/model_risk")
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def _state_dir_for_market(market_id: str) -> Path:
|
| 36 |
+
"""Return market-isolated model-risk state directory."""
|
| 37 |
+
from trading.market_registry import DEFAULT_MARKET_ID, get_market_storage_dir
|
| 38 |
+
key = (market_id or DEFAULT_MARKET_ID).strip().lower()
|
| 39 |
+
return get_market_storage_dir(key) / "model_risk"
|
| 40 |
|
| 41 |
|
| 42 |
# ---------------------------------------------------------------------------
|
|
|
|
| 61 |
# Auto-disable after N days without any valid evaluation
|
| 62 |
max_days_without_eval: int = 14
|
| 63 |
|
| 64 |
+
# Bootstrap grace period — allow limited trading while predictions accumulate
|
| 65 |
+
bootstrap_grace_days: int = 14 # days after first prediction to allow trading
|
| 66 |
+
bootstrap_max_positions: int = 2 # max concurrent positions during bootstrap
|
| 67 |
+
|
| 68 |
# Regime change detection
|
| 69 |
vol_regime_change_threshold: float = 2.0 # if vol doubles → warning
|
| 70 |
|
|
|
|
| 100 |
4. is_model_safe() — called before executing any trade
|
| 101 |
"""
|
| 102 |
|
| 103 |
+
def __init__(
|
| 104 |
+
self,
|
| 105 |
+
config: Optional[ModelRiskConfig] = None,
|
| 106 |
+
market_id: str = "bist",
|
| 107 |
+
) -> None:
|
| 108 |
self.config = config or ModelRiskConfig()
|
| 109 |
+
self.market_id = market_id
|
| 110 |
+
# Market-isolated paths
|
| 111 |
+
self._state_dir = _state_dir_for_market(market_id)
|
| 112 |
+
self._state_file = self._state_dir / "risk_state.json"
|
| 113 |
+
self._prediction_log = self._state_dir / "prediction_log.jsonl"
|
| 114 |
self._state = self._load_state()
|
| 115 |
|
| 116 |
# -- State persistence --
|
| 117 |
|
| 118 |
def _load_state(self) -> Dict[str, Any]:
|
| 119 |
+
self._state_dir.mkdir(parents=True, exist_ok=True)
|
| 120 |
+
if self._state_file.exists():
|
| 121 |
try:
|
| 122 |
+
return json.loads(self._state_file.read_text())
|
| 123 |
except Exception:
|
| 124 |
pass
|
| 125 |
return {
|
|
|
|
| 134 |
}
|
| 135 |
|
| 136 |
def _save_state(self) -> None:
|
| 137 |
+
self._state_dir.mkdir(parents=True, exist_ok=True)
|
| 138 |
self._state["updated_at"] = datetime.now(timezone.utc).isoformat()
|
| 139 |
+
self._state_file.write_text(json.dumps(self._state, indent=2, default=str))
|
| 140 |
|
| 141 |
# -- Prediction logging --
|
| 142 |
|
|
|
|
| 147 |
confidence: float,
|
| 148 |
days_ahead: int = 7,
|
| 149 |
) -> None:
|
| 150 |
+
self._state_dir.mkdir(parents=True, exist_ok=True)
|
| 151 |
record = {
|
| 152 |
"symbol": symbol,
|
| 153 |
"prediction_date": datetime.now(timezone.utc).strftime("%Y-%m-%d"),
|
|
|
|
| 157 |
"actual_return_pct": None,
|
| 158 |
"outcome_date": None,
|
| 159 |
}
|
| 160 |
+
with open(self._prediction_log, "a") as f:
|
| 161 |
f.write(json.dumps(record) + "\n")
|
| 162 |
|
| 163 |
def log_outcome(
|
|
|
|
| 167 |
actual_return_pct: float,
|
| 168 |
) -> None:
|
| 169 |
"""Update a prediction with its actual outcome."""
|
| 170 |
+
if not self._prediction_log.exists():
|
| 171 |
return
|
| 172 |
|
| 173 |
+
lines = self._prediction_log.read_text().strip().split("\n")
|
| 174 |
updated = False
|
| 175 |
new_lines = []
|
| 176 |
|
|
|
|
| 195 |
new_lines.append(line)
|
| 196 |
|
| 197 |
if updated:
|
| 198 |
+
self._prediction_log.write_text("\n".join(new_lines) + "\n")
|
| 199 |
|
| 200 |
# -- Evaluation --
|
| 201 |
|
|
|
|
| 217 |
"needed": self.config.min_samples_for_eval,
|
| 218 |
"model_enabled": self._state["model_enabled"],
|
| 219 |
}
|
| 220 |
+
# NOT writing last_evaluation here — insufficient data is not
|
| 221 |
+
# a valid evaluation. This prevents the stale-eval bypass in
|
| 222 |
+
# is_model_safe() from hiding the bootstrap gap.
|
| 223 |
return result
|
| 224 |
|
| 225 |
# Compute metrics
|
|
|
|
| 319 |
def is_model_safe(self) -> bool:
|
| 320 |
"""Check if the model is currently safe to use for trading signals.
|
| 321 |
|
| 322 |
+
Checks:
|
| 323 |
+
1. model_enabled flag (set by evaluate() on degradation)
|
| 324 |
+
2. Bootstrap guard: must have completed at least one real evaluation
|
| 325 |
+
(total_evaluations > 0) — BUT allows limited trading during
|
| 326 |
+
bootstrap_grace_days via is_bootstrap_mode()
|
| 327 |
+
3. Stale evaluation: no valid eval in max_days_without_eval → unsafe
|
| 328 |
"""
|
| 329 |
if not self._state["model_enabled"]:
|
| 330 |
return False
|
| 331 |
|
| 332 |
+
# Bootstrap guard — no real evaluation ever completed
|
| 333 |
+
if self._state.get("total_evaluations", 0) == 0:
|
| 334 |
+
# Check if within bootstrap grace period
|
| 335 |
+
if self._in_bootstrap_grace():
|
| 336 |
+
logger.info(
|
| 337 |
+
"Model risk: bootstrap grace period — allowing limited trading "
|
| 338 |
+
"(max %d positions). Need >= %d completed predictions for full evaluation.",
|
| 339 |
+
self.config.bootstrap_max_positions,
|
| 340 |
+
self.config.min_samples_for_eval,
|
| 341 |
+
)
|
| 342 |
+
return True # allow limited trading (caller checks is_bootstrap_mode)
|
| 343 |
+
logger.warning(
|
| 344 |
+
"Model risk: bootstrap period expired — 0 completed evaluations. "
|
| 345 |
+
"Need >= %d completed predictions before trading is allowed.",
|
| 346 |
+
self.config.min_samples_for_eval,
|
| 347 |
+
)
|
| 348 |
+
return False
|
| 349 |
+
|
| 350 |
# Check stale evaluation
|
| 351 |
last_eval = self._state.get("last_evaluation")
|
| 352 |
if last_eval:
|
|
|
|
| 366 |
|
| 367 |
return True
|
| 368 |
|
| 369 |
+
def _in_bootstrap_grace(self) -> bool:
|
| 370 |
+
"""Check if we are within the bootstrap grace period.
|
| 371 |
+
|
| 372 |
+
Grace period starts from the date of the first logged prediction.
|
| 373 |
+
"""
|
| 374 |
+
if not self._prediction_log.exists():
|
| 375 |
+
return False
|
| 376 |
+
try:
|
| 377 |
+
first_line = self._prediction_log.open().readline().strip()
|
| 378 |
+
if not first_line:
|
| 379 |
+
return False
|
| 380 |
+
rec = json.loads(first_line)
|
| 381 |
+
first_date = datetime.strptime(rec["prediction_date"], "%Y-%m-%d").replace(
|
| 382 |
+
tzinfo=timezone.utc
|
| 383 |
+
)
|
| 384 |
+
age_days = (datetime.now(timezone.utc) - first_date).days
|
| 385 |
+
return age_days <= self.config.bootstrap_grace_days
|
| 386 |
+
except Exception:
|
| 387 |
+
return False
|
| 388 |
+
|
| 389 |
+
def is_bootstrap_mode(self) -> bool:
|
| 390 |
+
"""Return True if system is in bootstrap grace period with limited trading.
|
| 391 |
+
|
| 392 |
+
When True, the caller should limit concurrent positions to
|
| 393 |
+
config.bootstrap_max_positions.
|
| 394 |
+
"""
|
| 395 |
+
return (
|
| 396 |
+
self._state.get("total_evaluations", 0) == 0
|
| 397 |
+
and self._state["model_enabled"]
|
| 398 |
+
and self._in_bootstrap_grace()
|
| 399 |
+
)
|
| 400 |
+
|
| 401 |
def get_status(self) -> Dict[str, Any]:
|
| 402 |
"""Return current model risk status (safe for logging/display)."""
|
| 403 |
return {
|
| 404 |
"model_enabled": self._state["model_enabled"],
|
| 405 |
"model_safe": self.is_model_safe(),
|
| 406 |
+
"bootstrap_mode": self.is_bootstrap_mode(),
|
| 407 |
+
"bootstrap_max_positions": self.config.bootstrap_max_positions if self.is_bootstrap_mode() else None,
|
| 408 |
"disabled_reason": self._state.get("disabled_reason"),
|
| 409 |
"disabled_at": self._state.get("disabled_at"),
|
| 410 |
"last_evaluation": self._state.get("last_evaluation"),
|
|
|
|
| 433 |
# -- Internal --
|
| 434 |
|
| 435 |
def _load_completed_predictions(self) -> List[Dict[str, Any]]:
|
| 436 |
+
if not self._prediction_log.exists():
|
| 437 |
return []
|
| 438 |
records = []
|
| 439 |
+
for line in self._prediction_log.read_text().strip().split("\n"):
|
| 440 |
try:
|
| 441 |
rec = json.loads(line)
|
| 442 |
if rec.get("actual_return_pct") is not None:
|
huggingface-space/trading/performance_scorecard.py
ADDED
|
@@ -0,0 +1,428 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Performance Scorecard — go/no-go readiness evaluation.
|
| 2 |
+
|
| 3 |
+
Evaluates 7 hard criteria that ALL must pass before real money:
|
| 4 |
+
1. DB integrity (no ghost records)
|
| 5 |
+
2. Worker consistency (consecutive run days ≥ 60)
|
| 6 |
+
3. Bootstrap guard unlocked (≥10 completed predictions, model evaluation passed)
|
| 7 |
+
4. Statistical sample size (≥50 closed trades)
|
| 8 |
+
5. Win rate ≥55% AND profit factor ≥1.5 (over 50+ trades)
|
| 9 |
+
6. Max drawdown ≤10% at all times
|
| 10 |
+
7. Walk-forward all periods positive
|
| 11 |
+
|
| 12 |
+
Results are persisted to scorecard_history table for trend tracking.
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
from __future__ import annotations
|
| 16 |
+
|
| 17 |
+
import json
|
| 18 |
+
import logging
|
| 19 |
+
from dataclasses import dataclass, field
|
| 20 |
+
from datetime import datetime, timezone
|
| 21 |
+
from pathlib import Path
|
| 22 |
+
from typing import Any, Dict, List, Optional
|
| 23 |
+
|
| 24 |
+
logger = logging.getLogger("trading.scorecard")

# ---------------------------------------------------------------------------
# Thresholds (hard-coded — everything must pass)
# ---------------------------------------------------------------------------

MIN_CLOSED_TRADES = 50           # minimum closed trades for statistical significance (check 4/5)
MIN_WIN_RATE_PCT = 55.0          # minimum win rate over the closed-trade sample (check 5)
MIN_PROFIT_FACTOR = 1.5          # minimum gross-wins / gross-losses ratio (check 5)
MAX_DRAWDOWN_PCT = 10.0          # maximum tolerated peak-to-trough equity drawdown (check 6)
MIN_CONSECUTIVE_RUN_DAYS = 60    # worker must run this many trading days without gaps (check 2)
MIN_PREDICTIONS_COMPLETED = 10   # completed predictions needed before bootstrap unlock (check 3)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
@dataclass
class CheckResult:
    """Outcome of one go/no-go readiness check, rendered verbatim in reports."""
    # Human-readable check name (e.g. "DB Integrity (no ghosts/duplicates)").
    name: str
    # True when the check's threshold was met.
    passed: bool
    # Observed value — free-form, shown next to the requirement.
    current: Any
    # Description of the threshold the observed value is compared against.
    required: Any
    # Optional extra context line for the report; empty when nothing to add.
    detail: str = ""
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
@dataclass
class Scorecard:
    """Aggregated result of all readiness checks for one market on one date.

    Produced by evaluate_readiness(); serializable via to_dict() and printable
    via to_report(). verdict is "GO" only when every check passed.
    """
    # Evaluation date, UTC, "YYYY-MM-DD".
    date: str
    # Market identifier, e.g. "bist" or "us".
    market_id: str
    verdict: str  # "GO" or "NO_GO"
    # Individual check outcomes, in evaluation order.
    checks: List[CheckResult] = field(default_factory=list)
    # Summary metrics
    closed_trades: int = 0
    win_rate_pct: float = 0.0
    profit_factor: float = 0.0
    max_drawdown_pct: float = 0.0
    total_return_pct: float = 0.0
    consecutive_run_days: int = 0
    predictions_completed: int = 0
    bootstrap_unlocked: bool = False

    @property
    def checks_passed(self) -> int:
        """Number of checks that passed."""
        return sum(1 for c in self.checks if c.passed)

    @property
    def checks_total(self) -> int:
        """Total number of checks evaluated."""
        return len(self.checks)

    def to_dict(self) -> Dict[str, Any]:
        """Return a JSON-serializable summary (floats rounded to 2 decimals)."""
        return {
            "date": self.date,
            "market_id": self.market_id,
            "verdict": self.verdict,
            "checks_passed": self.checks_passed,
            "checks_total": self.checks_total,
            "closed_trades": self.closed_trades,
            "win_rate_pct": round(self.win_rate_pct, 2),
            "profit_factor": round(self.profit_factor, 2),
            "max_drawdown_pct": round(self.max_drawdown_pct, 2),
            "total_return_pct": round(self.total_return_pct, 2),
            "consecutive_run_days": self.consecutive_run_days,
            "predictions_completed": self.predictions_completed,
            "bootstrap_unlocked": self.bootstrap_unlocked,
            "checks": [
                {
                    "name": c.name,
                    "passed": c.passed,
                    "current": c.current,
                    "required": c.required,
                    "detail": c.detail,
                }
                for c in self.checks
            ],
        }

    def to_report(self) -> str:
        """Render a human-readable multi-line text report of this scorecard."""
        lines = []
        lines.append("=" * 60)
        lines.append(f" PERFORMANCE SCORECARD — {self.market_id.upper()}")
        lines.append(f" Date: {self.date}")
        lines.append("=" * 60)
        lines.append("")
        lines.append(f" VERDICT: {'✅ GO' if self.verdict == 'GO' else '❌ NO GO'} ({self.checks_passed}/{self.checks_total} passed)")
        lines.append("")
        lines.append("-" * 60)
        for c in self.checks:
            icon = "✅" if c.passed else "❌"
            lines.append(f" {icon} {c.name}")
            lines.append(f" Current: {c.current} | Required: {c.required}")
            if c.detail:
                lines.append(f" {c.detail}")
        lines.append("-" * 60)
        lines.append("")
        lines.append(" METRICS:")
        lines.append(f" Closed trades: {self.closed_trades}")
        lines.append(f" Win rate: {self.win_rate_pct:.1f}%")
        lines.append(f" Profit factor: {self.profit_factor:.2f}")
        lines.append(f" Max drawdown: {self.max_drawdown_pct:.2f}%")
        lines.append(f" Total return: {self.total_return_pct:.2f}%")
        lines.append(f" Consecutive days: {self.consecutive_run_days}")
        lines.append(f" Predictions done: {self.predictions_completed}/{MIN_PREDICTIONS_COMPLETED}")
        lines.append(f" Bootstrap unlocked: {'Yes' if self.bootstrap_unlocked else 'No'}")
        lines.append("=" * 60)
        return "\n".join(lines)
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def _resolve_runtime_path(relative: str) -> str:
    """If a runtime copy exists (macOS launchd on /Volumes), prefer its data paths.

    Returns the path under ~/borsa_uygulamasi_runtime/huggingface-space when
    that copy contains *relative*; otherwise returns *relative* unchanged.
    """
    candidate = Path.home() / "borsa_uygulamasi_runtime" / "huggingface-space" / relative
    return str(candidate) if candidate.exists() else relative
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
def evaluate_readiness(
    market_id: str = "bist",
    db_path: Optional[str] = None,
    model_risk_dir: Optional[str] = None,
) -> Scorecard:
    """Run all 7 readiness checks and return a persisted Scorecard.

    Args:
        market_id: Market to evaluate (e.g. "bist", "us").
        db_path: Override for the trading SQLite DB path; defaults to the
            market's registered DB, preferring a runtime copy when present.
        model_risk_dir: Override for the model-risk artifact directory holding
            prediction_log.jsonl and risk_state.json.

    Returns:
        Scorecard with per-check results; also recorded via
        store.record_scorecard() for trend tracking.
    """
    from trading.db_store import TradingStore
    from trading.market_registry import get_market_storage_dir

    if db_path is None:
        from trading.market_registry import get_trading_db_path
        db_path = str(get_trading_db_path(market_id))
    # Prefer runtime copy DB if it exists (daemon accumulates data there)
    db_path = _resolve_runtime_path(db_path)

    store = TradingStore(db_path=db_path)
    today = datetime.now(timezone.utc).strftime("%Y-%m-%d")

    checks: List[CheckResult] = []

    # --- Gather data ---
    closed_trades = store.get_closed_trades(limit=10_000)
    equity_curve = store.get_equity_curve()
    consecutive_days = store.get_consecutive_run_days(market_id)

    # Prediction log location: market storage dir, with a legacy default for BIST.
    if model_risk_dir is None:
        storage = get_market_storage_dir(market_id)
        model_risk_dir = str(storage / "model_risk")
        # BIST keeps the historical flat layout that predates the registry.
        if market_id == "bist":
            model_risk_dir = "paper_trading/model_risk"
    # Prefer runtime copy if it exists
    model_risk_dir = _resolve_runtime_path(model_risk_dir)

    pred_log_path = Path(model_risk_dir) / "prediction_log.jsonl"
    risk_state_path = Path(model_risk_dir) / "risk_state.json"

    # Count predictions whose outcome is known (actual_return_pct filled in).
    predictions_completed = 0
    if pred_log_path.exists():
        for line in pred_log_path.read_text().strip().split("\n"):
            if not line.strip():
                continue
            try:
                entry = json.loads(line)
                if entry.get("actual_return_pct") is not None:
                    predictions_completed += 1
            except Exception:
                # Best-effort: skip individual malformed JSONL lines.
                pass

    bootstrap_unlocked = False
    total_evaluations = 0
    if risk_state_path.exists():
        try:
            rs = json.loads(risk_state_path.read_text())
            total_evaluations = rs.get("total_evaluations", 0)
            bootstrap_unlocked = total_evaluations > 0
        except Exception:
            pass

    # --- CHECK 1: DB integrity ---
    # Ghosts: open trades marked by ghost-cleanup or with implausibly low
    # confidence (pre-hardening artifacts). Duplicates: same symbol open twice.
    import sqlite3
    conn = sqlite3.connect(db_path)
    try:  # finally-close so a failed query cannot leak the connection
        cur = conn.cursor()
        cur.execute("""
            SELECT COUNT(*) FROM trades
            WHERE is_closed = 0
            AND (exit_reason = 'ghost_cleanup' OR signal_confidence < 10)
        """)
        ghost_count = cur.fetchone()[0]
        # Also check for duplicate open symbols
        cur.execute("""
            SELECT symbol, COUNT(*) as cnt FROM trades
            WHERE is_closed = 0
            GROUP BY symbol HAVING cnt > 1
        """)
        dup_symbols = cur.fetchall()
    finally:
        conn.close()

    db_clean = ghost_count == 0 and len(dup_symbols) == 0
    db_detail = ""
    if ghost_count > 0:
        db_detail += f"{ghost_count} ghost record(s). "
    if dup_symbols:
        db_detail += f"Duplicate open: {[s[0] for s in dup_symbols]}"
    checks.append(CheckResult(
        name="DB Integrity (no ghosts/duplicates)",
        passed=db_clean,
        current="Clean" if db_clean else f"{ghost_count} ghosts, {len(dup_symbols)} dups",
        required="No ghosts, no duplicate open positions",
        detail=db_detail,
    ))

    # --- CHECK 2: Worker consistency ---
    checks.append(CheckResult(
        name=f"Worker Consistency (≥{MIN_CONSECUTIVE_RUN_DAYS} consecutive days)",
        passed=consecutive_days >= MIN_CONSECUTIVE_RUN_DAYS,
        current=f"{consecutive_days} consecutive trading days",
        required=f"≥{MIN_CONSECUTIVE_RUN_DAYS} consecutive trading days",
        detail=f"System ran {consecutive_days}/{MIN_CONSECUTIVE_RUN_DAYS} required days without gaps.",
    ))

    # --- CHECK 3: Bootstrap guard ---
    checks.append(CheckResult(
        name=f"Bootstrap Guard (≥{MIN_PREDICTIONS_COMPLETED} predictions, model evaluated)",
        passed=bootstrap_unlocked,
        current=f"{predictions_completed} predictions, {total_evaluations} evaluations",
        required=f"≥{MIN_PREDICTIONS_COMPLETED} completed predictions + ≥1 successful evaluation",
        detail="Model must prove prediction quality before trading is allowed." if not bootstrap_unlocked else "Bootstrap passed.",
    ))

    # --- CHECK 4: Sample size ---
    n_closed = len(closed_trades)
    checks.append(CheckResult(
        name=f"Sample Size (≥{MIN_CLOSED_TRADES} closed trades)",
        passed=n_closed >= MIN_CLOSED_TRADES,
        current=f"{n_closed} closed trades",
        required=f"≥{MIN_CLOSED_TRADES} closed trades",
        detail=f"Need {max(0, MIN_CLOSED_TRADES - n_closed)} more closed trades for statistical significance.",
    ))

    # --- CHECK 5: Win rate + Profit factor ---
    win_rate = 0.0
    profit_factor = 0.0
    if n_closed > 0:
        wins = [t for t in closed_trades if (t.get("net_pnl") or 0) > 0]
        losses = [t for t in closed_trades if (t.get("net_pnl") or 0) <= 0]
        win_rate = len(wins) / n_closed * 100
        gross_wins = sum(t.get("net_pnl", 0) for t in wins)
        gross_losses = abs(sum(t.get("net_pnl", 0) for t in losses))
        # max(1, ...) guards the zero-loss case (PF would divide by zero).
        profit_factor = gross_wins / max(1, gross_losses)

    wr_ok = win_rate >= MIN_WIN_RATE_PCT and n_closed >= MIN_CLOSED_TRADES
    pf_ok = profit_factor >= MIN_PROFIT_FACTOR and n_closed >= MIN_CLOSED_TRADES
    perf_ok = wr_ok and pf_ok
    checks.append(CheckResult(
        name=f"Performance (WR≥{MIN_WIN_RATE_PCT}% & PF≥{MIN_PROFIT_FACTOR})",
        passed=perf_ok,
        current=f"WR={win_rate:.1f}% PF={profit_factor:.2f} (n={n_closed})",
        required=f"WR≥{MIN_WIN_RATE_PCT}% AND PF≥{MIN_PROFIT_FACTOR} over ≥{MIN_CLOSED_TRADES} trades",
        detail="Not enough trades to evaluate." if n_closed < MIN_CLOSED_TRADES else "",
    ))

    # --- CHECK 6: Max drawdown ---
    max_dd = 0.0
    total_return = 0.0
    if len(equity_curve) >= 2:
        equities = [s["equity"] for s in equity_curve]
        initial = equities[0]
        total_return = (equities[-1] - initial) / initial * 100 if initial > 0 else 0
        # Classic running-peak drawdown scan over the daily snapshots.
        peak = equities[0]
        for eq in equities:
            if eq > peak:
                peak = eq
            dd = (peak - eq) / peak * 100 if peak > 0 else 0
            if dd > max_dd:
                max_dd = dd

    dd_ok = max_dd <= MAX_DRAWDOWN_PCT and len(equity_curve) >= MIN_CONSECUTIVE_RUN_DAYS
    checks.append(CheckResult(
        name=f"Max Drawdown (≤{MAX_DRAWDOWN_PCT}%)",
        passed=dd_ok,
        current=f"{max_dd:.2f}% (from {len(equity_curve)} snapshots)",
        required=f"≤{MAX_DRAWDOWN_PCT}% over ≥{MIN_CONSECUTIVE_RUN_DAYS} days",
        detail="Not enough daily snapshots to compute reliable drawdown." if len(equity_curve) < MIN_CONSECUTIVE_RUN_DAYS else "",
    ))

    # --- CHECK 7: Walk-forward all periods positive ---
    wf_dir = Path("walk_forward_out")
    wf_results: List[Dict[str, Any]] = []
    all_positive = True

    # Load market universe to filter walk-forward results by market
    market_symbols: set = set()
    try:
        from trading.market_registry import get_scan_results_path
        _scan_path = get_scan_results_path(market_id)
        if _scan_path.exists():
            _scan = json.loads(_scan_path.read_text())
            # stage1 keys are the scanned symbols for this market
            if "stage1" in _scan and isinstance(_scan["stage1"], dict):
                market_symbols = set(_scan["stage1"].keys())
            elif "stage2" in _scan and isinstance(_scan["stage2"], dict):
                market_symbols = set(_scan["stage2"].keys())
    except Exception:
        pass

    if wf_dir.exists():
        import csv
        for trades_file in sorted(wf_dir.glob("*_trades.csv")):
            try:
                with open(trades_file) as f:
                    rows = list(csv.DictReader(f))
                if len(rows) < 2:
                    continue
                # Last sell row has final capital
                sells = [r for r in rows if r.get("type") == "SELL"]
                if sells:
                    final_cap = float(sells[-1]["capital"])
                    ret = (final_cap - 100_000) / 100_000 * 100
                    sym = trades_file.stem.split("_7d_")[0]
                    # Filter: only include symbols belonging to the requested market
                    if market_symbols and sym not in market_symbols:
                        continue
                    wf_results.append({"symbol": sym, "return_pct": round(ret, 2), "trades": len(sells)})
                    if ret <= 0:
                        all_positive = False
            except Exception:
                pass

    wf_ok = all_positive and len(wf_results) >= 3  # need at least 3 symbols tested
    neg = [r for r in wf_results if r["return_pct"] <= 0]
    checks.append(CheckResult(
        name="Walk-Forward All Periods Positive",
        passed=wf_ok,
        current=f"{len(wf_results)} symbols tested, {len(neg)} negative",
        required="All walk-forward test periods positive (≥3 symbols)",
        detail=f"Negative: {[r['symbol'] for r in neg]}" if neg else "All positive." if wf_results else "No walk-forward data found.",
    ))

    # --- Build scorecard ---
    verdict = "GO" if all(c.passed for c in checks) else "NO_GO"
    sc = Scorecard(
        date=today,
        market_id=market_id,
        verdict=verdict,
        checks=checks,
        closed_trades=n_closed,
        win_rate_pct=win_rate,
        profit_factor=profit_factor,
        max_drawdown_pct=max_dd,
        total_return_pct=total_return,
        consecutive_run_days=consecutive_days,
        predictions_completed=predictions_completed,
        bootstrap_unlocked=bootstrap_unlocked,
    )

    # Persist to DB so trends can be tracked across runs.
    store.record_scorecard(
        date=today,
        market_id=market_id,
        verdict=verdict,
        checks_passed=sc.checks_passed,
        checks_total=sc.checks_total,
        closed_trades=n_closed,
        win_rate_pct=win_rate,
        profit_factor=profit_factor,
        max_drawdown_pct=max_dd,
        total_return_pct=total_return,
        consecutive_run_days=consecutive_days,
        predictions_completed=predictions_completed,
        predictions_needed=MIN_PREDICTIONS_COMPLETED,
        bootstrap_unlocked=bootstrap_unlocked,
        detail_json=sc.to_dict(),
    )

    return sc
|
| 405 |
+
|
| 406 |
+
|
| 407 |
+
def print_readiness_report(market_id: str = "bist") -> Scorecard:
    """Evaluate readiness for *market_id*, print the report (plus a trend
    table when history exists), and return the Scorecard."""
    scorecard = evaluate_readiness(market_id=market_id)
    print(scorecard.to_report())

    # Append a short trend table when more than one historical scorecard exists.
    from trading.db_store import TradingStore
    from trading.market_registry import get_trading_db_path
    store = TradingStore(db_path=str(get_trading_db_path(market_id)))
    history = store.get_scorecard_history(market_id=market_id, limit=10)
    if len(history) > 1:
        print("\n TREND (last 10 scorecards):")
        print(f" {'Date':<12} {'Pass':>5} {'Trades':>7} {'WR%':>6} {'PF':>6} {'DD%':>6} {'Days':>5}")
        for row in reversed(history):
            trend_line = (
                f" {row['date']:<12} {row['checks_passed']}/{row['checks_total']} "
                f"{row['closed_trades']:>5} {row['win_rate_pct']:>5.1f} {row['profit_factor']:>5.2f} "
                f"{row['max_drawdown_pct']:>5.2f} {row['consecutive_run_days']:>5}"
            )
            print(trend_line)
        print()

    return scorecard
|
huggingface-space/trading/risk_gate.py
CHANGED
|
@@ -78,10 +78,9 @@ def make_risk_limits_for_market(market_id: str) -> RiskLimits:
|
|
| 78 |
return RiskLimits() # defaults are BIST-calibrated
|
| 79 |
# US equities — same capital ratios, USD-compatible amounts, NYSE hours
|
| 80 |
return RiskLimits(
|
| 81 |
-
market_open_hour_utc=13,
|
| 82 |
-
market_close_hour_utc=21,
|
| 83 |
-
min_price_tl=1.0,
|
| 84 |
-
# Capital limits same (25K USD/TL both reasonable for respective accounts)
|
| 85 |
)
|
| 86 |
|
| 87 |
|
|
@@ -101,17 +100,21 @@ class RiskGate:
|
|
| 101 |
and tracks daily order count / P&L internally.
|
| 102 |
"""
|
| 103 |
|
|
|
|
|
|
|
| 104 |
def __init__(
|
| 105 |
self,
|
| 106 |
limits: Optional[RiskLimits] = None,
|
| 107 |
monitor: Optional[TradingMonitor] = None,
|
| 108 |
kill_switch: Optional[KillSwitch] = None,
|
| 109 |
alert_manager: Optional[AlertManager] = None,
|
|
|
|
| 110 |
) -> None:
|
| 111 |
self.limits = limits or RiskLimits()
|
| 112 |
self.monitor = monitor
|
| 113 |
self.kill_switch = kill_switch or KillSwitch()
|
| 114 |
self.alerts = alert_manager
|
|
|
|
| 115 |
|
| 116 |
# Daily tracking
|
| 117 |
self._daily_date: Optional[str] = None
|
|
@@ -120,12 +123,56 @@ class RiskGate:
|
|
| 120 |
self._consecutive_losses: int = 0
|
| 121 |
self._peak_equity: float = 0.0
|
| 122 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 123 |
def _reset_daily_if_needed(self) -> None:
|
| 124 |
today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
|
| 125 |
if self._daily_date != today:
|
| 126 |
self._daily_date = today
|
| 127 |
self._daily_order_count = 0
|
| 128 |
self._daily_realized_pnl = 0.0
|
|
|
|
| 129 |
|
| 130 |
def record_trade_result(self, pnl: float) -> None:
|
| 131 |
"""Call after each trade closes to track consecutive losses."""
|
|
@@ -134,11 +181,13 @@ class RiskGate:
|
|
| 134 |
else:
|
| 135 |
self._consecutive_losses = 0
|
| 136 |
self._daily_realized_pnl += pnl
|
|
|
|
| 137 |
|
| 138 |
def record_equity(self, equity: float) -> None:
|
| 139 |
"""Track peak equity for drawdown calculation."""
|
| 140 |
if equity > self._peak_equity:
|
| 141 |
self._peak_equity = equity
|
|
|
|
| 142 |
|
| 143 |
def check(
|
| 144 |
self,
|
|
@@ -323,6 +372,7 @@ class RiskGate:
|
|
| 323 |
|
| 324 |
# All checks passed
|
| 325 |
self._daily_order_count += 1
|
|
|
|
| 326 |
return RiskCheckResult(
|
| 327 |
passed=True,
|
| 328 |
checks_performed=checks,
|
|
|
|
| 78 |
return RiskLimits() # defaults are BIST-calibrated
|
| 79 |
# US equities — same capital ratios, USD-compatible amounts, NYSE hours
|
| 80 |
return RiskLimits(
|
| 81 |
+
market_open_hour_utc=13,
|
| 82 |
+
market_close_hour_utc=21,
|
| 83 |
+
min_price_tl=1.0,
|
|
|
|
| 84 |
)
|
| 85 |
|
| 86 |
|
|
|
|
| 100 |
and tracks daily order count / P&L internally.
|
| 101 |
"""
|
| 102 |
|
| 103 |
+
_STATE_KEY = "risk_gate_state"
|
| 104 |
+
|
| 105 |
def __init__(
|
| 106 |
self,
|
| 107 |
limits: Optional[RiskLimits] = None,
|
| 108 |
monitor: Optional[TradingMonitor] = None,
|
| 109 |
kill_switch: Optional[KillSwitch] = None,
|
| 110 |
alert_manager: Optional[AlertManager] = None,
|
| 111 |
+
store: Optional[Any] = None,
|
| 112 |
) -> None:
|
| 113 |
self.limits = limits or RiskLimits()
|
| 114 |
self.monitor = monitor
|
| 115 |
self.kill_switch = kill_switch or KillSwitch()
|
| 116 |
self.alerts = alert_manager
|
| 117 |
+
self._store = store # TradingStore for persistence
|
| 118 |
|
| 119 |
# Daily tracking
|
| 120 |
self._daily_date: Optional[str] = None
|
|
|
|
| 123 |
self._consecutive_losses: int = 0
|
| 124 |
self._peak_equity: float = 0.0
|
| 125 |
|
| 126 |
+
# Restore persisted state on startup
|
| 127 |
+
self._load_state()
|
| 128 |
+
|
| 129 |
+
def _load_state(self) -> None:
    """Load persisted risk counters from SQLite (survives restarts).

    Restores two classes of counters from the TradingStore state blob:
    - Daily counters (order count, realized P&L): only when the saved date
      equals today's UTC date, so a restart after midnight starts fresh.
    - Cross-day counters (consecutive losses, peak equity): always restored.
    Missing store or malformed state degrades to a no-op with a warning.
    """
    if not self._store:
        return
    try:
        # NOTE(review): assumes self._store.get_state(key) returns a dict or
        # falsy when absent — matches the set_state() payload in _save_state().
        saved = self._store.get_state(self._STATE_KEY)
        if not saved or not isinstance(saved, dict):
            return
        today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
        saved_date = saved.get("daily_date")
        # Daily counters: only restore if same day
        if saved_date == today:
            self._daily_date = today
            self._daily_order_count = int(saved.get("daily_order_count", 0))
            self._daily_realized_pnl = float(saved.get("daily_realized_pnl", 0.0))
        # Cross-day counters: always restore
        self._consecutive_losses = int(saved.get("consecutive_losses", 0))
        self._peak_equity = float(saved.get("peak_equity", 0.0))
        # NOTE(review): logs _daily_order_count even when today's counters were
        # not restored (stale saved_date) — value then reflects the in-memory
        # default, not the persisted one.
        logger.info(
            "RiskGate state restored: consec_losses=%d, peak_equity=%.0f, daily_orders=%d",
            self._consecutive_losses, self._peak_equity, self._daily_order_count,
        )
    except Exception as e:
        logger.warning("Failed to load risk gate state: %s", e)
|
| 153 |
+
|
| 154 |
+
def _save_state(self) -> None:
    """Persist current risk counters to SQLite."""
    if not self._store:
        return
    # Snapshot the counters first so the store call is a single expression.
    snapshot = {
        "daily_date": self._daily_date,
        "daily_order_count": self._daily_order_count,
        "daily_realized_pnl": self._daily_realized_pnl,
        "consecutive_losses": self._consecutive_losses,
        "peak_equity": self._peak_equity,
    }
    try:
        self._store.set_state(self._STATE_KEY, snapshot)
    except Exception as exc:
        logger.warning("Failed to save risk gate state: %s", exc)
|
| 168 |
+
|
| 169 |
def _reset_daily_if_needed(self) -> None:
    """Zero the per-day counters (and persist) once the UTC date rolls over."""
    current_day = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    if self._daily_date == current_day:
        return
    self._daily_date = current_day
    self._daily_order_count = 0
    self._daily_realized_pnl = 0.0
    self._save_state()
|
| 176 |
|
| 177 |
def record_trade_result(self, pnl: float) -> None:
|
| 178 |
"""Call after each trade closes to track consecutive losses."""
|
|
|
|
| 181 |
else:
|
| 182 |
self._consecutive_losses = 0
|
| 183 |
self._daily_realized_pnl += pnl
|
| 184 |
+
self._save_state()
|
| 185 |
|
| 186 |
def record_equity(self, equity: float) -> None:
    """Track peak equity for drawdown calculation."""
    # Persist only when a new peak is recorded (matches the diff nesting).
    if equity <= self._peak_equity:
        return
    self._peak_equity = equity
    self._save_state()
|
| 191 |
|
| 192 |
def check(
|
| 193 |
self,
|
|
|
|
| 372 |
|
| 373 |
# All checks passed
|
| 374 |
self._daily_order_count += 1
|
| 375 |
+
self._save_state()
|
| 376 |
return RiskCheckResult(
|
| 377 |
passed=True,
|
| 378 |
checks_performed=checks,
|
huggingface-space/trading/scanner_engine.py
ADDED
|
@@ -0,0 +1,403 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import gc
|
| 4 |
+
import json
|
| 5 |
+
import logging
|
| 6 |
+
import time
|
| 7 |
+
from dataclasses import dataclass, field
|
| 8 |
+
from datetime import date, datetime, timedelta, timezone
|
| 9 |
+
from pathlib import Path
|
| 10 |
+
from typing import Any, Dict, List, Optional
|
| 11 |
+
|
| 12 |
+
from trading.market_registry import get_scan_results_path, to_provider_symbol
|
| 13 |
+
from trading.universe_provider import get_market_universe
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def _default_backtest_end() -> str:
    """Return the most recent completed trading weekday as YYYY-MM-DD."""
    candidate = date.today() - timedelta(days=1)
    # Roll back over the weekend (Monday..Friday are weekday() 0..4).
    while candidate.weekday() > 4:
        candidate -= timedelta(days=1)
    return candidate.isoformat()
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
@dataclass(frozen=True)
class ScanConfig:
    """Immutable configuration for one market scan run.

    Filters (min_data_days/min_avg_volume/min_price) gate stage-1; the
    backtest/threshold fields parameterize stage-2 evaluation.
    """
    # Market identifier (e.g. "bist", "us") — selects per-market file paths.
    market_id: str
    # Name of the symbol universe to scan (resolved by the universe provider).
    universe_name: str
    # Human-readable scan title for reporting.
    title: str
    # Optional explicit symbol list overriding the universe.
    symbols: Optional[List[str]] = None
    # Optional cap on how many symbols are scanned.
    max_symbols: Optional[int] = None
    min_data_days: int = 252
    min_avg_volume: int = 500_000
    min_price: float = 1.0
    backtest_start: str = "2023-01-01"
    # Defaults to the most recent completed trading weekday.
    backtest_end: str = field(default_factory=_default_backtest_end)
    train_window: int = 252
    days_ahead: int = 7
    min_dir_acc: float = 0.53
    min_sharpe: float = 0.0
    min_hit_rate: float = 40.0
    min_trades: int = 3

    @property
    def final_results_file(self) -> Path:
        """Path of the published (completed) scan results for this market."""
        return get_scan_results_path(self.market_id, completed=True)

    @property
    def work_results_file(self) -> Path:
        """Path of the in-progress scan results for this market."""
        return get_scan_results_path(self.market_id, completed=False)

    @property
    def universe_key(self) -> str:
        """Stable cache/identity key combining market, universe, and overrides."""
        parts = [self.market_id, self.universe_name]
        if self.symbols:
            parts.append("symbols=" + ",".join(self.symbols))
        if self.max_symbols:
            parts.append(f"max={self.max_symbols}")
        return "|".join(parts)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def _load_results(config: ScanConfig) -> Dict[str, Any]:
    """Load scan results, preferring the in-progress work file over the
    published final file; fall back to an empty skeleton when neither exists
    or the chosen file fails to parse."""
    chosen: Optional[Path] = None
    for candidate in (config.work_results_file, config.final_results_file):
        if candidate.exists():
            chosen = candidate
            break

    if chosen is not None:
        try:
            return json.loads(chosen.read_text())
        except Exception:
            # Corrupt/partial JSON — fall through to the empty skeleton.
            pass

    return {
        "market_id": config.market_id,
        "scan_started": None,
        "universe": None,
        "universe_key": None,
        "stage1": {},
        "stage2": {},
        "completed": False,
    }
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def _save_results(config: ScanConfig, data: Dict[str, Any]) -> None:
    """Atomically write the in-progress scan results for *config*'s market.

    Stamps the payload with the owning market_id and a UTC timestamp so
    readers can detect stale or cross-market data, then writes through a
    sibling .tmp file followed by an atomic replace so readers never observe
    a partially written JSON document.
    """
    config.work_results_file.parent.mkdir(parents=True, exist_ok=True)
    data["market_id"] = config.market_id
    data["updated_at"] = datetime.now(timezone.utc).isoformat()
    tmp = config.work_results_file.with_suffix(".tmp")
    tmp.write_text(json.dumps(data, indent=2, default=str))
    # Path.replace() overwrites an existing target on every platform;
    # Path.rename() raises FileExistsError on Windows when the target exists.
    tmp.replace(config.work_results_file)
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def _publish_final_if_complete(config: ScanConfig, data: Dict[str, Any]) -> None:
    """Publish *data* to the market's final results file, but only when the
    scan is marked completed.

    Incomplete scans are ignored so consumers of the final file always see a
    fully finished result. The write is atomic (tmp file + replace).
    """
    if not data.get("completed", False):
        return
    config.final_results_file.parent.mkdir(parents=True, exist_ok=True)
    tmp = config.final_results_file.with_suffix(".tmp")
    tmp.write_text(json.dumps(data, indent=2, default=str))
    # Path.replace() overwrites an existing target on every platform;
    # Path.rename() raises FileExistsError on Windows when the target exists.
    tmp.replace(config.final_results_file)
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def stage1_prefilter(symbol: str, config: ScanConfig) -> Dict[str, Any]:
    """Stage-1 liquidity/data-quality screen for a single symbol.

    Downloads ~2 years of daily bars via yfinance and rejects the symbol
    when it has too little history, too little average volume, or too low
    a last price. Returns a result dict with ``passed`` plus the measured
    values; never raises — failures are reported through ``reason``.
    """
    import yfinance as yf

    provider = to_provider_symbol(symbol, market_id=config.market_id)
    out: Dict[str, Any] = {
        "market_id": config.market_id,
        "symbol": symbol,
        "provider_symbol": provider,
        "passed": False,
        "reason": "",
        "avg_volume": 0.0,
        "data_days": 0,
        "last_price": 0.0,
        "checked_at": datetime.now(timezone.utc).isoformat(),
    }

    def _scalar(value: Any) -> float:
        # yfinance can return a one-element Series (multi-index columns);
        # collapse it to a plain float either way.
        return float(value.iloc[0]) if hasattr(value, "iloc") else float(value)

    try:
        frame = yf.download(provider, period="2y", progress=False, auto_adjust=True)
        if frame is None or frame.empty:
            out["reason"] = "no_data"
            return out

        data_days = len(frame)
        out["data_days"] = data_days
        if data_days < config.min_data_days:
            out["reason"] = f"insufficient_data ({data_days} < {config.min_data_days} days)"
            return out

        # Missing Volume column counts as zero average volume.
        avg_vol = _scalar(frame["Volume"].mean()) if "Volume" in frame.columns else 0.0
        out["avg_volume"] = round(avg_vol, 0)
        if avg_vol < config.min_avg_volume:
            out["reason"] = f"low_volume ({avg_vol:,.0f} < {config.min_avg_volume:,.0f})"
            return out

        last_price = _scalar(frame["Close"].iloc[-1])
        out["last_price"] = round(last_price, 2)
        if last_price < config.min_price:
            out["reason"] = f"below_min_price (price={last_price:.2f} < {config.min_price:.2f})"
            return out

        out["passed"] = True
        out["reason"] = "OK"
        return out
    except Exception as exc:
        out["reason"] = f"error: {exc}"
        return out
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
def stage2_backtest(symbol: str, config: ScanConfig, logger: logging.Logger) -> Dict[str, Any]:
    """Stage-2 screen: run a walk-forward ML backtest and apply thresholds.

    A symbol is ``eligible`` only when direction accuracy, Sharpe, hit rate
    and trade count all clear the configured minimums. Never raises —
    backtest failures are reported via ``reason`` and a warning log.
    """
    from analysis.walk_forward_backtest import walk_forward_backtest

    out: Dict[str, Any] = {
        "market_id": config.market_id,
        "symbol": symbol,
        "eligible": False,
        "reason": "",
        "evaluated_at": datetime.now(timezone.utc).isoformat(),
    }

    try:
        _, metrics = walk_forward_backtest(
            symbol=symbol,
            start_date=config.backtest_start,
            end_date=config.backtest_end,
            market_id=config.market_id,
            days_ahead=config.days_ahead,
            train_window=config.train_window,
            model_type="rf",
            use_technical_gate=True,
            initial_capital=100_000.0,
            commission_bps=10.0,
            slippage_bps=10.0,
            exit_rule="signal_or_fixed",
            max_hold_days=config.days_ahead,
            stop_loss_pct=0.05,
            take_profit_pct=0.10,
            trailing_stop_pct=0.07,
            max_position_pct=0.50,
            max_risk_per_trade_pct=0.02,
        )

        dir_acc = metrics["direction_accuracy"]
        sharpe = metrics["sharpe"]
        hit_rate = metrics.get("hit_rate_pct", 0.0)
        total_ret = metrics["total_return_pct"]
        trades = metrics["trades_count"]
        max_dd = metrics.get("max_drawdown_pct", 0.0)

        out.update({
            "dir_acc": round(dir_acc, 4),
            "sharpe": round(sharpe, 3),
            "hit_rate": round(hit_rate, 1),
            "total_return_pct": round(total_ret, 1),
            "trades": trades,
            "max_drawdown_pct": round(max_dd, 1),
        })

        # Collect every threshold breach; an empty list means eligible.
        failures = [
            msg
            for failed, msg in (
                (dir_acc < config.min_dir_acc, f"dir_acc={dir_acc:.1%}<{config.min_dir_acc:.0%}"),
                (sharpe < config.min_sharpe, f"sharpe={sharpe:.3f}<{config.min_sharpe}"),
                (hit_rate < config.min_hit_rate, f"hit_rate={hit_rate:.1f}%<{config.min_hit_rate}%"),
                (trades < config.min_trades, f"trades={trades}<{config.min_trades}"),
            )
            if failed
        ]

        out["eligible"] = not failures
        out["reason"] = "; ".join(failures) if failures else "OK"
        return out
    except Exception as exc:
        out["reason"] = f"backtest_error: {exc}"
        logger.warning("Stage 2 failed for %s: %s", symbol, exc)
        return out
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
def get_universe(config: ScanConfig, logger: logging.Logger) -> List[str]:
    """Resolve the list of symbols to scan for this config.

    Precedence: an explicit ``config.symbols`` subset (deduplicated,
    original order kept) wins; otherwise the live market universe is
    fetched; if that fetch fails, a hard-coded per-market fallback list
    is used. ``config.max_symbols`` truncates in every path.
    """
    if config.symbols:
        # dict.fromkeys drops duplicates while preserving caller order.
        subset = list(dict.fromkeys(config.symbols))
        if config.max_symbols:
            subset = subset[: config.max_symbols]
        logger.info("Using explicit symbol subset for %s/%s: %s", config.market_id, config.universe_name, ", ".join(subset))
        return subset
    try:
        universe = get_market_universe(config.market_id, config.universe_name)
        symbols = universe.symbols
        if config.max_symbols:
            symbols = symbols[: config.max_symbols]
            logger.info(
                "Fetched %d stocks from %s/%s, limited to first %d symbols",
                len(universe.symbols), universe.market_id, universe.name, len(symbols),
            )
        else:
            logger.info("Fetched %d stocks from %s/%s", len(universe.symbols), universe.market_id, universe.name)
        return symbols
    except Exception as exc:
        logger.error("Failed to fetch universe %s/%s: %s", config.market_id, config.universe_name, exc)
        # Best-effort static universes so a scan can still run when the
        # live universe source is unavailable.
        fallback_universes = {
            "bist": [
                "THYAO", "AKBNK", "GARAN", "EREGL", "SISE",
                "TUPRS", "KCHOL", "ASELS", "BIMAS", "SAHOL",
                "YKBNK", "HALKB", "VAKBN", "TCELL", "ARCLK",
                "PETKM", "TOASO", "KOZAA", "KOZAL", "SASA",
                "TAVHL", "TTKOM", "ENKAI", "FROTO", "EKGYO",
                "PGSUS", "SOKM", "DOHOL", "GUBRF", "ISCTR",
            ],
            "us": [
                "AAPL", "MSFT", "AMZN", "GOOGL", "META",
                "NVDA", "TSLA", "BRK-B", "JPM", "JNJ",
                "V", "UNH", "PG", "HD", "MA",
                "DIS", "BAC", "XOM", "PFE", "CSCO",
                "ABBV", "AVGO", "KO", "PEP", "TMO",
                "COST", "MRK", "WMT", "ABT", "CRM",
            ],
        }
        return fallback_universes.get(config.market_id, [])
|
| 261 |
+
|
| 262 |
+
|
| 263 |
+
def run_scan(config: ScanConfig, logger: logging.Logger, *, force: bool = False, stage1_only: bool = False) -> Dict[str, Any]:
    """Run the full two-stage scan for *config*, resuming where possible.

    Stage 1 applies the liquidity/data-quality pre-filter per symbol;
    Stage 2 runs the ML walk-forward backtest on Stage-1 survivors. State
    is saved after every symbol so an interrupted scan can resume. A new
    scan is started (discarding saved state) when ``force`` is set or when
    the saved state belongs to a different universe key.

    Returns the full results dict, or ``{}`` when no symbols are found.
    """
    symbols = get_universe(config, logger)
    if not symbols:
        logger.error("No symbols found for universe: %s", config.universe_name)
        return {}

    data = _load_results(config)
    # Older state files may predate "universe_key"; fall back to "universe"
    # so those scans can still resume instead of restarting.
    existing_key = data.get("universe_key") or data.get("universe")
    is_same_universe = existing_key == config.universe_key
    if not is_same_universe or force:
        logger.info("Starting fresh scan for %s/%s (%d stocks)", config.market_id, config.universe_name, len(symbols))
        data = {
            "market_id": config.market_id,
            "scan_started": datetime.now(timezone.utc).isoformat(),
            "universe": config.universe_name,
            "universe_key": config.universe_key,
            "total_stocks": len(symbols),
            "stage1": {},
            "stage2": {},
            "completed": False,
        }
        _save_results(config, data)
    else:
        s1_done = len(data.get("stage1", {}))
        s2_done = len(data.get("stage2", {}))
        logger.info("Resuming scan: %d/%d Stage1, %d Stage2 done", s1_done, len(symbols), s2_done)

    logger.info("=" * 60)
    logger.info("STAGE 1: Liquidity & Data Quality Pre-Filter")
    logger.info("=" * 60)

    # Only symbols without a saved Stage-1 result are (re)checked.
    s1_todo = [s for s in symbols if s not in data.get("stage1", {})]
    total_s1 = len(symbols)
    done_s1 = total_s1 - len(s1_todo)

    for i, sym in enumerate(s1_todo, start=done_s1 + 1):
        t0 = time.time()
        result = stage1_prefilter(sym, config)
        elapsed = time.time() - t0
        status = "PASS" if result["passed"] else f"FAIL ({result['reason']})"
        logger.info("[Stage1 %d/%d] %s: %s (%.1fs)", i, total_s1, sym, status, elapsed)
        data.setdefault("stage1", {})[sym] = result
        # Persist after every symbol so a crash loses at most one result.
        _save_results(config, data)
        # Downloads create sizeable per-symbol garbage; collect eagerly to
        # keep the long-running worker's memory bounded.
        gc.collect()

    s1_passed = [s for s, v in data["stage1"].items() if v.get("passed")]
    s1_failed = [s for s, v in data["stage1"].items() if not v.get("passed")]
    logger.info("")
    logger.info("Stage 1 Results: %d PASS / %d FAIL out of %d", len(s1_passed), len(s1_failed), total_s1)
    logger.info("Passed: %s", ", ".join(sorted(s1_passed)))
    logger.info("")

    if stage1_only:
        # Stage-1-only runs still publish, marked complete with empty stage2.
        data["completed"] = True
        _save_results(config, data)
        _publish_final_if_complete(config, data)
        return data

    logger.info("=" * 60)
    logger.info("STAGE 2: ML Walk-Forward Backtest")
    logger.info("=" * 60)

    # Stage 2 runs only on Stage-1 survivors not already backtested.
    s2_todo = [s for s in s1_passed if s not in data.get("stage2", {})]
    total_s2 = len(s1_passed)
    done_s2 = total_s2 - len(s2_todo)
    times_s2: List[float] = []

    for i, sym in enumerate(s2_todo, start=done_s2 + 1):
        t0 = time.time()
        result = stage2_backtest(sym, config, logger)
        elapsed = time.time() - t0
        times_s2.append(elapsed)

        if result["eligible"]:
            status = (
                f"ELIGIBLE (ret={result.get('total_return_pct', 0):.1f}%, "
                f"sharpe={result.get('sharpe', 0):.3f}, "
                f"hit={result.get('hit_rate', 0):.0f}%)"
            )
        else:
            status = f"EXCLUDED ({result['reason']})"

        # ETA from the running average of this session's backtest times.
        avg_time = sum(times_s2) / len(times_s2)
        remaining = total_s2 - i
        eta_min = (remaining * avg_time) / 60
        logger.info("[Stage2 %d/%d] %s: %s (%.1fs, ETA: %.0f min)", i, total_s2, sym, status, elapsed, eta_min)
        data.setdefault("stage2", {})[sym] = result
        _save_results(config, data)
        gc.collect()

    data["completed"] = True
    data["scan_finished"] = datetime.now(timezone.utc).isoformat()
    _save_results(config, data)
    _publish_final_if_complete(config, data)
    print_report(config, logger, data)
    return data
|
| 359 |
+
|
| 360 |
+
|
| 361 |
+
def print_report(config: ScanConfig, logger: logging.Logger, data: Optional[Dict[str, Any]] = None) -> None:
|
| 362 |
+
if data is None:
|
| 363 |
+
data = _load_results(config)
|
| 364 |
+
|
| 365 |
+
s2 = data.get("stage2", {})
|
| 366 |
+
if not s2:
|
| 367 |
+
logger.info("No Stage 2 results found. Run scan first.")
|
| 368 |
+
return
|
| 369 |
+
|
| 370 |
+
eligible = {s: v for s, v in s2.items() if v.get("eligible")}
|
| 371 |
+
excluded = {s: v for s, v in s2.items() if not v.get("eligible")}
|
| 372 |
+
|
| 373 |
+
print("\n" + "=" * 70)
|
| 374 |
+
print(f" {config.title} SCAN RESULTS — {data.get('universe', '?').upper()}")
|
| 375 |
+
print("=" * 70)
|
| 376 |
+
s1 = data.get("stage1", {})
|
| 377 |
+
s1_pass = sum(1 for v in s1.values() if v.get("passed"))
|
| 378 |
+
s1_fail = sum(1 for v in s1.values() if not v.get("passed"))
|
| 379 |
+
print(f"\nStage 1 (Liquidity Filter): {s1_pass} pass / {s1_fail} fail / {len(s1)} total")
|
| 380 |
+
print(f"Stage 2 (ML Backtest): {len(eligible)} eligible / {len(excluded)} excluded / {len(s2)} tested")
|
| 381 |
+
|
| 382 |
+
if eligible:
|
| 383 |
+
print(f"\n{'─' * 70}")
|
| 384 |
+
print(f" ELIGIBLE STOCKS ({len(eligible)})")
|
| 385 |
+
print(f"{'─' * 70}")
|
| 386 |
+
print(f" {'Symbol':<10} {'Return%':>9} {'Sharpe':>8} {'HitRate%':>9} {'MaxDD%':>8} {'Trades':>7}")
|
| 387 |
+
print(f" {'─'*10} {'─'*9} {'─'*8} {'─'*9} {'─'*8} {'─'*7}")
|
| 388 |
+
sorted_eligible = sorted(eligible.items(), key=lambda kv: kv[1].get("sharpe", -999), reverse=True)
|
| 389 |
+
for sym, metrics in sorted_eligible:
|
| 390 |
+
print(
|
| 391 |
+
f" {sym:<10} {metrics.get('total_return_pct', 0):>+8.1f}% "
|
| 392 |
+
f"{metrics.get('sharpe', 0):>8.3f} "
|
| 393 |
+
f"{metrics.get('hit_rate', 0):>8.1f}% "
|
| 394 |
+
f"{metrics.get('max_drawdown_pct', 0):>7.1f}% "
|
| 395 |
+
f"{metrics.get('trades', 0):>7d}"
|
| 396 |
+
)
|
| 397 |
+
|
| 398 |
+
if excluded:
|
| 399 |
+
print(f"\n{'─' * 70}")
|
| 400 |
+
print(f" EXCLUDED STOCKS ({len(excluded)})")
|
| 401 |
+
print(f"{'─' * 70}")
|
| 402 |
+
for sym, metrics in sorted(excluded.items()):
|
| 403 |
+
print(f" {sym:<10} {metrics.get('reason', 'unknown')}")
|