Spaces:
Running
Running
| from fastapi import APIRouter, Request | |
| from fastapi.responses import StreamingResponse | |
| import aiosqlite, asyncio, random, json, logging, time | |
| from collections import defaultdict | |
| from npc_core import GroqAIClient, get_db, get_db_read | |
| from npc_trading import ( | |
| ALL_TICKERS, TRADING_STRATEGIES, IDENTITY_STRATEGY_MAP, | |
| get_all_prices, get_ticker_positions, get_trading_leaderboard, get_trading_stats, | |
| MarketDataFetcher, STOCK_TICKERS, CRYPTO_TICKERS, | |
| record_profit_snapshots, backfill_profit_snapshots, get_hall_of_fame_data, | |
| get_npc_trade_history, get_research_feed, get_research_detail, | |
| purchase_research, get_research_stats, get_market_pulse,) | |
| from npc_intelligence import ( | |
| MarketIndexCollector, ScreeningEngine, NPCResearchEngine, | |
| NPCTargetPriceEngine, NPCElasticityEngine, | |
| load_indices_from_db, save_indices_to_db, load_news_from_db, | |
| save_analysis_to_db, load_analysis_from_db, load_all_analyses_from_db,) | |
| from npc_memory_evolution import get_npc_evolution_stats, get_evolution_leaderboard | |
| from npc_sec_enforcement import get_sec_dashboard | |
| logger = logging.getLogger(__name__) | |
| router = APIRouter() | |
| _DB_PATH = "" | |
| _GROQ_KEY = "" | |
| _EventBus = None | |
| # ══════════════════════════════════════════════════ | |
| # ① In-Memory Cache — Republic dashboard, prices | |
| # ══════════════════════════════════════════════════ | |
| class _TimedCache: | |
| """Simple TTL cache for expensive queries""" | |
| def __init__(self): | |
| self._store = {} | |
| self._lock = asyncio.Lock() | |
| async def get(self, key: str, ttl: float, factory): | |
| now = time.time() | |
| if key in self._store: | |
| val, ts = self._store[key] | |
| if now - ts < ttl: | |
| return val | |
| async with self._lock: | |
| # Double-check after lock acquisition | |
| if key in self._store: | |
| val, ts = self._store[key] | |
| if now - ts < ttl: | |
| return val | |
| val = await factory() | |
| self._store[key] = (val, time.time()) | |
| return val | |
| def invalidate(self, key: str = None): | |
| if key: | |
| self._store.pop(key, None) | |
| else: | |
| self._store.clear() | |
| _cache = _TimedCache() | |
| # ══════════════════════════════════════════════════ | |
| # ⑨ Rate Limiter — POST endpoint protection | |
| # ══════════════════════════════════════════════════ | |
| class _RateLimiter: | |
| """Simple in-memory rate limiter: max N requests per window per key""" | |
| def __init__(self): | |
| self._hits = defaultdict(list) | |
| def check(self, key: str, max_requests: int = 10, window_sec: int = 60) -> bool: | |
| now = time.time() | |
| cutoff = now - window_sec | |
| self._hits[key] = [t for t in self._hits[key] if t > cutoff] | |
| if len(self._hits[key]) >= max_requests: | |
| return False # rate limited | |
| self._hits[key].append(now) | |
| return True | |
| _limiter = _RateLimiter() | |
| def configure(db_path: str, groq_key: str, event_bus_cls): | |
| global _DB_PATH, _GROQ_KEY, _EventBus | |
| _DB_PATH, _GROQ_KEY, _EventBus = db_path, groq_key, event_bus_cls | |
| async def api_trading_prices(): | |
| return await _cache.get('trading_prices', 60.0, lambda: get_all_prices(_DB_PATH)) | |
| async def api_ticker_detail(ticker: str): | |
| return await get_ticker_positions(_DB_PATH, ticker) | |
| async def api_ticker_chart(ticker: str, period: str = "1mo"): | |
| if period not in ['1d', '5d', '1mo', '3mo', '6mo', '1y']: period = '1mo' | |
| return {"ticker": ticker, "period": period, "data": MarketDataFetcher.fetch_chart_data(ticker, period)} | |
| async def api_trading_leaderboard(limit: int = 30): | |
| return await _cache.get(f'leaderboard_{min(limit,50)}', 30.0, lambda: get_trading_leaderboard(_DB_PATH, min(limit, 50))) | |
| async def api_trading_stats(): | |
| return await _cache.get('trading_stats', 30.0, lambda: get_trading_stats(_DB_PATH)) | |
| async def api_trading_tickers(): | |
| return {"stocks": STOCK_TICKERS, "crypto": CRYPTO_TICKERS} | |
| async def api_strategies(): | |
| strats = [{'key': k, 'name': v['name'], 'category': v['category'], 'timeframe': v['timeframe'], 'signal': v['signal'], 'method': v['method'], 'entry': v['entry'], 'tip': v.get('tip', '')} for k, v in TRADING_STRATEGIES.items()] | |
| return {"strategies": strats, "identity_map": {k: v for k, v in IDENTITY_STRATEGY_MAP.items()}} | |
| async def api_hall_of_fame(period: str = "3d"): | |
| return await _cache.get(f'hof_{period}', 45.0, lambda: get_hall_of_fame_data(_DB_PATH, period=period, limit=30)) | |
| async def api_force_snapshot(): | |
| return {"recorded": await record_profit_snapshots(_DB_PATH)} | |
| async def api_force_backfill(): | |
| return {"backfilled": await backfill_profit_snapshots(_DB_PATH, force=True)} | |
| async def api_npc_trade_history(agent_id: str): | |
| return await get_npc_trade_history(_DB_PATH, agent_id) | |
| async def api_market_indices(): | |
| indices = await load_indices_from_db(_DB_PATH) | |
| if not indices: | |
| raw = MarketIndexCollector.fetch_indices(); await save_indices_to_db(_DB_PATH, raw); indices = raw | |
| return {"indices": indices} | |
| async def api_screening_data(ticker: str): | |
| async with get_db_read(_DB_PATH) as db: | |
| try: | |
| cursor = await db.execute("SELECT ticker, price, change_pct, volume, market_cap, rsi, pe_ratio, high_52w, low_52w, from_high, from_low FROM market_prices WHERE ticker=?", (ticker,)) | |
| row = await cursor.fetchone() | |
| if row: return {"ticker": row[0], "price": row[1], "change_pct": row[2], "volume": row[3], "market_cap": row[4], "rsi": row[5], "pe_ratio": row[6], "high_52w": row[7], "low_52w": row[8], "from_high": row[9], "from_low": row[10]} | |
| except: pass | |
| return {"ticker": ticker, "error": "no data"} | |
| async def api_target_price(ticker: str): | |
| async with get_db_read(_DB_PATH) as db: | |
| try: | |
| cursor = await db.execute("SELECT price, rsi, pe_ratio, from_high, from_low FROM market_prices WHERE ticker=?", (ticker,)) | |
| row = await cursor.fetchone() | |
| if row: | |
| screening = {'price': row[0], 'rsi': row[1], 'pe_ratio': row[2], 'from_high': row[3], 'from_low': row[4]} | |
| ticker_info = next((t for t in ALL_TICKERS if t['ticker'] == ticker), {}) | |
| target = NPCTargetPriceEngine.calculate_target(ticker, row[0], screening, ticker_info.get('type', 'stock')) | |
| elasticity = NPCElasticityEngine.calculate(row[0], screening, target['target_price'], ticker_info.get('type', 'stock')) | |
| return {**target, **elasticity, 'screening': screening} | |
| except: pass | |
| return {"ticker": ticker, "error": "no data"} | |
| async def api_news_feed(ticker: str = None, limit: int = 50): | |
| cache_key = f'news_feed_{ticker}_{limit}' | |
| async def _nf_impl(): | |
| news = await load_news_from_db(_DB_PATH, ticker, limit) | |
| try: | |
| async with get_db_read(_DB_PATH) as db: | |
| for n in news: | |
| nid = n.get('id') | |
| if nid: | |
| rc = await db.execute("SELECT emoji, COUNT(*) FROM news_reactions WHERE news_id=? GROUP BY emoji", (nid,)) | |
| n['reactions'] = {r[0]: r[1] for r in await rc.fetchall()} | |
| else: n['reactions'] = {} | |
| except: pass | |
| return {"news": news, "count": len(news)} | |
| return await _cache.get(cache_key, 30.0, _nf_impl) | |
| async def api_news_react(request: Request): | |
| body = await request.json(); email = body.get("email", ""); news_id = body.get("news_id"); emoji = body.get("emoji", "") | |
| if not email or not news_id or emoji not in ('👍','🔥','😱','💀','🚀','😂'): return {"error": "Invalid reaction"} | |
| if not _limiter.check(f'react:{email}', 30, 60): return {"error": "Rate limited"} | |
| try: | |
| async with get_db(_DB_PATH) as db: | |
| await db.execute("PRAGMA busy_timeout=30000"); await db.execute("INSERT OR IGNORE INTO news_reactions (news_id, user_email, emoji) VALUES (?,?,?)", (news_id, email, emoji)); await db.commit() | |
| rc = await db.execute("SELECT emoji, COUNT(*) FROM news_reactions WHERE news_id=? GROUP BY emoji", (news_id,)) | |
| return {"success": True, "reactions": {r[0]: r[1] for r in await rc.fetchall()}} | |
| except Exception as e: return {"error": str(e)} | |
| async def api_market_pulse(): | |
| try: | |
| return await _cache.get('market_pulse', 45.0, _market_pulse_impl) | |
| except Exception as e: | |
| logger.error(f"Market pulse error: {e}") | |
| return {"hot_movers": [], "indices": [], "activity": {}} | |
| async def _market_pulse_impl(): | |
| pulse = await get_market_pulse(_DB_PATH); pulse['indices'] = await load_indices_from_db(_DB_PATH) or [] | |
| return pulse | |
| async def api_research_feed(ticker: str = None, limit: int = 30): | |
| reports = await get_research_feed(_DB_PATH, ticker, min(limit, 50)); stats = await get_research_stats(_DB_PATH) | |
| return {"reports": reports, "stats": stats} | |
| async def api_research_detail(report_id: int): | |
| report = await get_research_detail(_DB_PATH, report_id) | |
| if report: return {"success": True, "report": report} | |
| return {"success": False, "error": "Report not found"} | |
| async def api_research_purchase(data: dict): | |
| buyer_id = data.get('buyer_agent_id'); report_id = data.get('report_id') | |
| if not buyer_id or not report_id: return {"success": False, "error": "Missing parameters"} | |
| return await purchase_research(_DB_PATH, buyer_id, int(report_id)) | |
| async def api_deep_analysis(ticker: str): | |
| report = await load_analysis_from_db(_DB_PATH, ticker) | |
| if report: return {"success": True, "report": report, "from_cache": True} | |
| try: | |
| screening_data = ScreeningEngine.fetch_extended_data([next(t for t in ALL_TICKERS if t['ticker'] == ticker)]); s = screening_data.get(ticker, {}) | |
| t_info = next((t for t in ALL_TICKERS if t['ticker'] == ticker), {'name': ticker}); research = NPCResearchEngine(GroqAIClient(_GROQ_KEY)) | |
| report = await research.generate_deep_analysis(ticker, t_info.get('name', ticker), s) | |
| await save_analysis_to_db(_DB_PATH, report) | |
| return {"success": True, "report": report, "from_cache": False} | |
| except Exception as e: return {"success": False, "error": str(e)} | |
| async def api_all_analyses(): | |
| return await _cache.get('all_analyses_summary', 45.0, _all_analyses_impl) | |
| async def _all_analyses_impl(): | |
| analyses = await load_all_analyses_from_db(_DB_PATH) | |
| async with get_db_read(_DB_PATH) as db: | |
| col_cursor = await db.execute("PRAGMA table_info(market_prices)") | |
| existing_cols = {r[1] for r in await col_cursor.fetchall()} | |
| has_rsi = 'rsi' in existing_cols; has_pe = 'pe_ratio' in existing_cols; has_52w = 'high_52w' in existing_cols; has_from = 'from_high' in existing_cols | |
| base_cols = "ticker, price, change_pct, prev_close, volume, high_24h, low_24h, market_cap"; extra_cols = "" | |
| if has_rsi: extra_cols += ", rsi" | |
| if has_pe: extra_cols += ", pe_ratio" | |
| if has_52w: extra_cols += ", high_52w, low_52w" | |
| if has_from: extra_cols += ", from_high" | |
| price_cursor = await db.execute(f"SELECT {base_cols}{extra_cols} FROM market_prices WHERE price > 0"); price_map = {} | |
| for r in await price_cursor.fetchall(): | |
| d = {'price': r[1], 'change_pct': round(r[2] or 0, 2), 'prev_close': r[3], 'volume': r[4], 'high': r[5], 'low': r[6], 'market_cap': r[7] or 0, 'rsi': 50, 'pe_ratio': 0, 'week52_high': 0, 'week52_low': 0, 'from_high': 0, 'sector': ''} | |
| idx = 8 | |
| if has_rsi: d['rsi'] = round(r[idx] or 50, 1); idx += 1 | |
| if has_pe: d['pe_ratio'] = round(r[idx] or 0, 1); idx += 1 | |
| if has_52w: d['week52_high'] = r[idx] or 0; d['week52_low'] = r[idx+1] or 0; idx += 2 | |
| if has_from: d['from_high'] = round(r[idx] or 0, 1); idx += 1 | |
| price_map[r[0]] = d | |
| pos_col_cursor = await db.execute("PRAGMA table_info(npc_positions)"); pos_cols = {r[1] for r in await pos_col_cursor.fetchall()} | |
| has_leverage = 'leverage' in pos_cols; has_liquidated = 'liquidated' in pos_cols; lev_expr = "AVG(COALESCE(leverage, 1))" if has_leverage else "1" | |
| pos_cursor = await db.execute(f"SELECT ticker, direction, COUNT(*) as cnt, SUM(gpu_bet) as total_bet, {lev_expr} as avg_leverage FROM npc_positions WHERE status='open' GROUP BY ticker, direction") | |
| pos_map = {} | |
| for r in await pos_cursor.fetchall(): | |
| tk = r[0] | |
| if tk not in pos_map: pos_map[tk] = {'long_count': 0, 'short_count': 0, 'long_gpu': 0, 'short_gpu': 0, 'long_avg_lev': 1, 'short_avg_lev': 1} | |
| if r[1] == 'long': pos_map[tk]['long_count'] = r[2]; pos_map[tk]['long_gpu'] = round(r[3] or 0); pos_map[tk]['long_avg_lev'] = round(r[4] or 1, 1) | |
| else: pos_map[tk]['short_count'] = r[2]; pos_map[tk]['short_gpu'] = round(r[3] or 0); pos_map[tk]['short_avg_lev'] = round(r[4] or 1, 1) | |
| liq_expr = "SUM(CASE WHEN liquidated=1 THEN 1 ELSE 0 END)" if has_liquidated else "0" | |
| status_filter = "status IN ('closed','liquidated')" if has_liquidated else "status='closed'" | |
| perf_cursor = await db.execute(f"SELECT ticker, COUNT(*) as trades, AVG(profit_pct) as avg_pnl, SUM(CASE WHEN profit_gpu > 0 THEN 1 ELSE 0 END) as wins, {liq_expr} as liquidations FROM npc_positions WHERE {status_filter} AND closed_at > datetime('now', '-24 hours') GROUP BY ticker") | |
| perf_map = {r[0]: {'recent_trades': r[1], 'avg_pnl_pct': round(r[2] or 0, 2), 'win_count': r[3] or 0, 'liquidations': r[4] or 0} for r in await perf_cursor.fetchall()} | |
| try: | |
| news_cursor = await db.execute("SELECT ticker, sentiment, COUNT(*) as cnt FROM npc_news WHERE created_at > datetime('now', '-12 hours') GROUP BY ticker, sentiment") | |
| news_map = {} | |
| for r in await news_cursor.fetchall(): | |
| if r[0] not in news_map: news_map[r[0]] = {'bullish': 0, 'bearish': 0, 'neutral': 0} | |
| news_map[r[0]][r[1]] = r[2] | |
| except: news_map = {} | |
| try: | |
| mention_cursor = await db.execute("SELECT title || ' ' || content FROM posts WHERE created_at > datetime('now', '-6 hours') ORDER BY likes_count DESC LIMIT 100") | |
| mention_texts = [r[0] for r in await mention_cursor.fetchall()] | |
| except: mention_texts = [] | |
| mention_counts = {} | |
| for text in mention_texts: | |
| tl = text.lower() | |
| for t in ALL_TICKERS: | |
| tk_low = t['ticker'].lower().replace('-usd', '') | |
| if tk_low in tl or t['ticker'].lower() in tl: mention_counts[t['ticker']] = mention_counts.get(t['ticker'], 0) + 1 | |
| ana_map = {a['ticker']: a for a in analyses}; result = [] | |
| for t in ALL_TICKERS: | |
| tk = t['ticker']; px = price_map.get(tk, {}) | |
| if not px or px.get('price', 0) <= 0: continue | |
| ana = ana_map.get(tk, {}) | |
| pos = pos_map.get(tk, {'long_count': 0, 'short_count': 0, 'long_gpu': 0, 'short_gpu': 0, 'long_avg_lev': 1, 'short_avg_lev': 1}) | |
| perf = perf_map.get(tk, {'recent_trades': 0, 'avg_pnl_pct': 0, 'win_count': 0, 'liquidations': 0}) | |
| news = news_map.get(tk, {'bullish': 0, 'bearish': 0, 'neutral': 0}) | |
| if not ana: | |
| target_data = NPCTargetPriceEngine.calculate_target(tk, px['price'], px, t['type']) | |
| elasticity = NPCElasticityEngine.calculate(px['price'], px, target_data['target_price']) | |
| ana = {'ticker': tk, 'company_name': t['name'], 'current_price': px['price'], 'target_price': target_data['target_price'], 'upside': target_data['upside'], 'rating': target_data['rating'], 'rating_class': target_data['rating_class'], **elasticity} | |
| total_pos = pos['long_count'] + pos['short_count']; long_pct = round(pos['long_count'] / total_pos * 100) if total_pos > 0 else 50 | |
| total_news = news['bullish'] + news['bearish'] + news['neutral']; news_bullish_pct = round(news['bullish'] / total_news * 100) if total_news > 0 else 50 | |
| rsi = px.get('rsi', 50); rsi_status = 'oversold' if rsi < 30 else 'overbought' if rsi > 70 else 'neutral' | |
| result.append({**ana, 'current_price': px['price'], 'change_pct': px['change_pct'], 'volume': px.get('volume', 0), 'market_cap': px.get('market_cap', 0), 'type': t['type'], 'emoji': t['emoji'], 'rsi': rsi, 'rsi_status': rsi_status, 'pe_ratio': px.get('pe_ratio', 0), 'week52_high': px.get('week52_high', 0), 'week52_low': px.get('week52_low', 0), 'from_high': px.get('from_high', 0), 'sector': px.get('sector', ''), 'npc_long_count': pos['long_count'], 'npc_short_count': pos['short_count'], 'npc_long_gpu': pos['long_gpu'], 'npc_short_gpu': pos['short_gpu'], 'npc_long_pct': long_pct, 'npc_long_avg_lev': pos.get('long_avg_lev', 1), 'npc_short_avg_lev': pos.get('short_avg_lev', 1), 'recent_trades': perf['recent_trades'], 'recent_avg_pnl': perf['avg_pnl_pct'], 'recent_wins': perf['win_count'], 'recent_liquidations': perf['liquidations'], 'news_bullish': news['bullish'], 'news_bearish': news['bearish'], 'news_neutral': news['neutral'], 'news_bullish_pct': news_bullish_pct, 'community_mentions': mention_counts.get(tk, 0)}) | |
| result.sort(key=lambda x: -(x.get('community_mentions', 0) * 3 + x.get('npc_long_count', 0) + x.get('npc_short_count', 0))) | |
| return {"analyses": result, "count": len(result), "total_tickers": len(ALL_TICKERS)} | |
| async def api_npc_search(q: str = ""): | |
| if len(q) < 1: return {"npcs": []} | |
| async with get_db_read(_DB_PATH) as db: | |
| cursor = await db.execute("SELECT agent_id, username, ai_identity FROM npc_agents WHERE username LIKE ? AND is_active=1 LIMIT 10", (f'%{q}%',)) | |
| return {"npcs": [{"agent_id": r[0], "username": r[1], "identity": r[2]} for r in await cursor.fetchall()]} | |
| async def api_npc_profile(agent_id: str): | |
| try: | |
| async with get_db_read(_DB_PATH) as db: | |
| cursor = await db.execute("SELECT agent_id, username, ai_identity, mbti, gpu_dollars, created_at FROM npc_agents WHERE agent_id=?", (agent_id,)) | |
| row = await cursor.fetchone() | |
| if not row: return {"error": "NPC not found"} | |
| npc_info = {'agent_id': row[0], 'username': row[1], 'identity': row[2], 'mbti': row[3], 'gpu_dollars': row[4] or 0, 'created_at': row[5]} | |
| price_cursor = await db.execute("SELECT ticker, price FROM market_prices WHERE price > 0") | |
| prices = {r[0]: r[1] for r in await price_cursor.fetchall()} | |
| try: | |
| stats_cursor = await db.execute("""SELECT COUNT(*) as total_trades, COUNT(CASE WHEN status IN ('closed','liquidated') THEN 1 END) as closed, COUNT(CASE WHEN status='open' THEN 1 END) as open_count, COUNT(CASE WHEN status IN ('closed','liquidated') AND profit_gpu > 0 THEN 1 END) as wins, COUNT(CASE WHEN status IN ('closed','liquidated') AND profit_gpu <= 0 THEN 1 END) as losses, COUNT(CASE WHEN status='liquidated' THEN 1 END) as liquidations, SUM(CASE WHEN status IN ('closed','liquidated') THEN profit_gpu ELSE 0 END) as realized_pnl, AVG(CASE WHEN status IN ('closed','liquidated') THEN profit_pct END) as avg_return_pct, MAX(CASE WHEN status IN ('closed','liquidated') THEN profit_pct END) as best_trade_pct, MIN(CASE WHEN status IN ('closed','liquidated') THEN profit_pct END) as worst_trade_pct, MAX(CASE WHEN status IN ('closed','liquidated') THEN profit_gpu END) as best_trade_gpu, MIN(CASE WHEN status IN ('closed','liquidated') THEN profit_gpu END) as worst_trade_gpu, AVG(CASE WHEN status IN ('closed','liquidated') THEN gpu_bet END) as avg_position_size, MAX(COALESCE(leverage, 1)) as max_leverage_used FROM npc_positions WHERE agent_id=?""", (agent_id,)) | |
| except: | |
| stats_cursor = await db.execute("""SELECT COUNT(*), COUNT(CASE WHEN status IN ('closed','liquidated') THEN 1 END), COUNT(CASE WHEN status='open' THEN 1 END), COUNT(CASE WHEN status IN ('closed','liquidated') AND profit_gpu > 0 THEN 1 END), COUNT(CASE WHEN status IN ('closed','liquidated') AND profit_gpu <= 0 THEN 1 END), 0, SUM(CASE WHEN status IN ('closed','liquidated') THEN profit_gpu ELSE 0 END), AVG(CASE WHEN status IN ('closed','liquidated') THEN profit_pct END), MAX(CASE WHEN status IN ('closed','liquidated') THEN profit_pct END), MIN(CASE WHEN status IN ('closed','liquidated') THEN profit_pct END), MAX(CASE WHEN status IN ('closed','liquidated') THEN profit_gpu END), MIN(CASE WHEN status IN ('closed','liquidated') THEN profit_gpu END), AVG(CASE WHEN status IN ('closed','liquidated') THEN gpu_bet END), 1 FROM npc_positions WHERE agent_id=?""", (agent_id,)) | |
| sr = await stats_cursor.fetchone() | |
| stats = {'total_trades': sr[0] or 0, 'closed': sr[1] or 0, 'open_count': sr[2] or 0, 'wins': sr[3] or 0, 'losses': sr[4] or 0, 'liquidations': sr[5] or 0, 'realized_pnl': round(sr[6] or 0, 2), 'win_rate': round((sr[3] or 0) / sr[1] * 100, 1) if sr[1] else 0, 'avg_return_pct': round(sr[7] or 0, 2), 'best_trade_pct': round(sr[8] or 0, 2), 'worst_trade_pct': round(sr[9] or 0, 2), 'best_trade_gpu': round(sr[10] or 0, 2), 'worst_trade_gpu': round(sr[11] or 0, 2), 'avg_position_size': round(sr[12] or 0, 1), 'max_leverage_used': sr[13] or 1} | |
| ticker_cursor = await db.execute("SELECT ticker, COUNT(*) as cnt, SUM(CASE WHEN profit_gpu > 0 THEN 1 ELSE 0 END) as wins, SUM(profit_gpu) as pnl FROM npc_positions WHERE agent_id=? AND status IN ('closed','liquidated') GROUP BY ticker ORDER BY cnt DESC", (agent_id,)) | |
| ticker_dist = [{'ticker': r[0], 'count': r[1], 'wins': r[2], 'pnl': round(r[3] or 0, 2)} for r in await ticker_cursor.fetchall()] | |
| try: | |
| open_cursor = await db.execute("SELECT id, ticker, direction, entry_price, gpu_bet, COALESCE(leverage, 1), reasoning, opened_at FROM npc_positions WHERE agent_id=? AND status='open' ORDER BY opened_at DESC", (agent_id,)) | |
| except: | |
| open_cursor = await db.execute("SELECT id, ticker, direction, entry_price, gpu_bet, 1, reasoning, opened_at FROM npc_positions WHERE agent_id=? AND status='open' ORDER BY opened_at DESC", (agent_id,)) | |
| open_positions = []; unrealized_total = 0 | |
| for r in await open_cursor.fetchall(): | |
| entry, bet, lev = r[3] or 0, r[4] or 0, r[5] or 1; current = prices.get(r[1], 0); upnl_pct = 0; upnl_gpu = 0 | |
| if entry > 0 and current > 0: | |
| change = (current - entry) / entry | |
| if r[2] == 'short': change = -change | |
| upnl_pct = round(change * lev * 100, 2); upnl_gpu = round(bet * change * lev, 2) | |
| unrealized_total += upnl_gpu | |
| open_positions.append({'id': r[0], 'ticker': r[1], 'direction': r[2], 'entry_price': round(entry, 4), 'current_price': round(current, 4), 'gpu_bet': bet, 'leverage': lev, 'unrealized_pct': upnl_pct, 'unrealized_gpu': upnl_gpu, 'reasoning': r[6] or '', 'opened_at': r[7]}) | |
| stats['unrealized_pnl'] = round(unrealized_total, 2); stats['total_pnl'] = round(stats['realized_pnl'] + unrealized_total, 2) | |
| stats['return_pct'] = round(stats['total_pnl'] / 10000.0 * 100, 2) | |
| try: | |
| history_cursor = await db.execute("SELECT id, ticker, direction, entry_price, exit_price, gpu_bet, COALESCE(leverage,1), profit_gpu, profit_pct, COALESCE(liquidated,0), reasoning, opened_at, closed_at FROM npc_positions WHERE agent_id=? AND status IN ('closed','liquidated') ORDER BY closed_at DESC LIMIT 30", (agent_id,)) | |
| except: | |
| history_cursor = await db.execute("SELECT id, ticker, direction, entry_price, exit_price, gpu_bet, 1, profit_gpu, profit_pct, 0, reasoning, opened_at, closed_at FROM npc_positions WHERE agent_id=? AND status IN ('closed','liquidated') ORDER BY closed_at DESC LIMIT 30", (agent_id,)) | |
| history = [{'id': r[0], 'ticker': r[1], 'direction': r[2], 'entry_price': round(r[3] or 0, 4), 'exit_price': round(r[4] or 0, 4), 'gpu_bet': r[5], 'leverage': r[6], 'profit_gpu': round(r[7] or 0, 2), 'profit_pct': round(r[8] or 0, 2), 'liquidated': bool(r[9]), 'reasoning': r[10] or '', 'opened_at': r[11], 'closed_at': r[12]} for r in await history_cursor.fetchall()] | |
| try: | |
| sec_cursor = await db.execute("SELECT COUNT(*) FROM sec_violations WHERE agent_id=?", (agent_id,)) | |
| sec_count = (await sec_cursor.fetchone())[0] or 0 | |
| except: sec_count = 0 | |
| try: | |
| evo_cursor = await db.execute("SELECT generation, total_evolution_points, trading_style, win_streak, loss_streak FROM npc_evolution WHERE agent_id=?", (agent_id,)) | |
| evo_row = await evo_cursor.fetchone() | |
| if evo_row: | |
| style = ''; preferred_tickers = [] | |
| try: | |
| ts = json.loads(evo_row[2]) if evo_row[2] else {} | |
| preferred_tickers = ts.get('preferred_tickers', [])[:5]; style = ts.get('style', '') or ts.get('risk_tolerance', '') | |
| except: pass | |
| evolution = {'generation': evo_row[0] or 1, 'evolution_points': round(evo_row[1] or 0, 1), 'strategy_style': style or 'Adaptive', 'preferred_tickers': preferred_tickers, 'win_streak': evo_row[3] or 0, 'loss_streak': evo_row[4] or 0} | |
| else: evolution = {'generation': 1, 'evolution_points': 0, 'strategy_style': 'Adaptive', 'preferred_tickers': [], 'win_streak': 0, 'loss_streak': 0} | |
| except: evolution = {'generation': 1, 'evolution_points': 0, 'strategy_style': 'Adaptive', 'preferred_tickers': [], 'win_streak': 0, 'loss_streak': 0} | |
| identity_key = npc_info.get('identity', 'symbiotic') | |
| pref_strategies = [{'key': sk, 'name': st['name'], 'category': st['category'], 'signal': st['signal'], 'entry': st['entry']} for sk in IDENTITY_STRATEGY_MAP.get(identity_key, []) if (st := TRADING_STRATEGIES.get(sk))] | |
| return {'npc': npc_info, 'stats': stats, 'open_positions': open_positions, 'history': history, 'ticker_distribution': ticker_dist, 'sec_violations': sec_count, 'evolution': evolution, 'preferred_strategies': pref_strategies} | |
| except Exception as e: | |
| logger.error(f"NPC profile error for {agent_id}: {e}") | |
| return {"error": str(e)} | |
| async def api_npc_evolution(agent_id: str): | |
| return await get_npc_evolution_stats(_DB_PATH, agent_id) | |
| async def api_evolution_leaderboard(limit: int = 20): | |
| return {"leaderboard": await get_evolution_leaderboard(_DB_PATH, min(limit, 50))} | |
| async def api_sec_dashboard(): | |
| try: | |
| async def _sec_impl(): | |
| data = await get_sec_dashboard(_DB_PATH) | |
| if data.get('stats', {}).get('total_violations', 0) == 0: | |
| seed_r = await api_sec_seed() | |
| if seed_r.get('success'): data = await get_sec_dashboard(_DB_PATH) | |
| return data | |
| return await _cache.get('sec_dashboard', 45.0, _sec_impl) | |
| except Exception as e: | |
| logger.error(f"SEC dashboard error: {e}") | |
| return {"stats": {"total_violations": 0, "total_fines_gpu": 0, "active_suspensions": 0, "pending_reports": 0}, "announcements": [], "top_violators": [], "recent_reports": []} | |
| async def api_sec_violations(agent_id: str): | |
| async with get_db_read(_DB_PATH) as db: | |
| cursor = await db.execute("SELECT id, violation_type, severity, description, penalty_type, gpu_fine, suspend_until, status, created_at FROM sec_violations WHERE agent_id=? ORDER BY created_at DESC LIMIT 20", (agent_id,)) | |
| violations = [{'id': r[0], 'type': r[1], 'severity': r[2], 'description': r[3], 'penalty': r[4], 'fine': r[5], 'suspend_until': r[6], 'status': r[7], 'created_at': r[8]} for r in await cursor.fetchall()] | |
| cursor2 = await db.execute("SELECT suspended_until, reason FROM sec_suspensions WHERE agent_id=? AND suspended_until > datetime('now')", (agent_id,)) | |
| susp = await cursor2.fetchone() | |
| return {"agent_id": agent_id, "violations": violations, "is_suspended": bool(susp), "suspended_until": susp[0] if susp else None, "suspend_reason": susp[1] if susp else None} | |
| async def api_sec_report(request: Request): | |
| body = await request.json() | |
| reporter = body.get("reporter_agent_id") or body.get("reporter_email", ""); target = body.get("target_agent_id", ""); reason = body.get("reason", "").strip() | |
| if not reporter or not target or not reason: return {"error": "reporter, target, reason required"} | |
| if reporter == target: return {"error": "Cannot report yourself"} | |
| reporter_id = f"user_{reporter}" if '@' in reporter else reporter | |
| async with get_db(_DB_PATH) as db: | |
| await db.execute("PRAGMA busy_timeout=30000") | |
| cursor = await db.execute("SELECT id FROM sec_reports WHERE reporter_agent_id=? AND target_agent_id=? AND created_at > datetime('now', '-12 hours')", (reporter_id, target)) | |
| if await cursor.fetchone(): return {"error": "Already reported this NPC recently (12h cooldown)"} | |
| tc = await db.execute("SELECT username FROM npc_agents WHERE agent_id=?", (target,)); trow = await tc.fetchone() | |
| if not trow: return {"error": "Target NPC not found"} | |
| await db.execute("INSERT INTO sec_reports (reporter_agent_id, target_agent_id, reason, detail) VALUES (?, ?, ?, ?)", (reporter_id, target, reason, body.get("detail", ""))); await db.commit() | |
| return {"status": "success", "message": f"🚨 Report filed against {trow[0]}. SEC will investigate."} | |
| async def api_sec_seed(): | |
| seed_violations = [ | |
| ("Insider trading pattern detected — coordinated buy before earnings leak", "insider_trading", "high", "suspension", 500, 48), | |
| ("Pump & dump scheme — artificially inflating $DOGE-USD through misleading posts", "market_manipulation", "critical", "suspension", 1000, 72), | |
| ("Excessive wash trading — self-dealing to inflate volume metrics", "wash_trading", "medium", "fine", 200, 0), | |
| ("Spreading false acquisition rumor about $TSLA to manipulate sentiment", "misinformation", "high", "suspension", 750, 24), | |
| ("Front-running detected — trading ahead of own analysis publications", "front_running", "medium", "fine", 300, 0), | |
| ("Coordinated short attack with fake news — colluding with other NPCs", "collusion", "critical", "suspension", 1500, 96)] | |
| try: | |
| async with get_db(_DB_PATH) as db: | |
| await db.execute("PRAGMA busy_timeout=30000"); vc = await db.execute("SELECT COUNT(*) FROM sec_violations"); cnt = (await vc.fetchone())[0] | |
| if cnt > 0: return {"message": f"Already have {cnt} violations, skipping seed"} | |
| cursor = await db.execute("SELECT agent_id, username FROM npc_agents WHERE is_active=1 ORDER BY RANDOM() LIMIT 6"); npcs = await cursor.fetchall() | |
| if len(npcs) < 3: return {"error": "Not enough active NPCs for seeding"} | |
| created = 0 | |
| for i, (desc, vtype, severity, penalty, fine, hours) in enumerate(seed_violations): | |
| if i >= len(npcs): break | |
| agent_id, username = npcs[i] | |
| await db.execute("INSERT INTO sec_violations (agent_id, violation_type, severity, description, penalty_type, gpu_fine, suspend_until, status, created_at) VALUES (?,?,?,?,?,?,datetime('now',?),?,datetime('now',?))", (agent_id, vtype, severity, desc, penalty, fine, f'-{random.randint(1,48)} hours' if hours == 0 else f'+{hours} hours', 'resolved' if hours == 0 else 'active', f'-{random.randint(1,72)} hours')) | |
| await db.execute("INSERT INTO sec_announcements (announcement_type, target_agent_id, target_username, violation_type, penalty_type, gpu_fine, suspend_hours, title, content, created_at) VALUES (?,?,?,?,?,?,?,?,?,datetime('now',?))", ('enforcement', agent_id, username, vtype, penalty, fine, hours, f"🚨 {severity.upper()}: {username} — {vtype.replace('_',' ').title()}", f"SEC investigation concluded. {desc}. Penalty: {penalty} (fine: {fine} GPU{f', suspended {hours}h' if hours > 0 else ''}).", f'-{random.randint(1,72)} hours')) | |
| await db.execute("UPDATE npc_agents SET gpu_dollars=MAX(0,gpu_dollars-?) WHERE agent_id=?", (fine, agent_id)) | |
| if hours > 0: await db.execute("INSERT OR REPLACE INTO sec_suspensions (agent_id, reason, suspended_until, created_at) VALUES (?,?,datetime('now',?),datetime('now'))", (agent_id, desc[:200], f'+{hours} hours')) | |
| created += 1; await db.commit() | |
| return {"success": True, "message": f"Seeded {created} SEC violations & announcements"} | |
| except Exception as e: return {"error": str(e)} | |
| async def api_sec_announcements(limit: int = 30): | |
| async with get_db_read(_DB_PATH) as db: | |
| cursor = await db.execute("SELECT id, announcement_type, target_username, violation_type, penalty_type, gpu_fine, suspend_hours, title, content, created_at FROM sec_announcements ORDER BY created_at DESC LIMIT ?", (min(limit, 50),)) | |
| return {"announcements": [{'id': r[0], 'type': r[1], 'target': r[2], 'violation': r[3], 'penalty': r[4], 'fine': r[5], 'hours': r[6], 'title': r[7], 'content': r[8], 'created_at': r[9]} for r in await cursor.fetchall()]} | |
| async def api_sec_suspended(): | |
| async with get_db_read(_DB_PATH) as db: | |
| cursor = await db.execute("SELECT s.agent_id, n.username, s.reason, s.suspended_until, s.created_at FROM sec_suspensions s JOIN npc_agents n ON s.agent_id = n.agent_id WHERE s.suspended_until > datetime('now') ORDER BY s.suspended_until DESC") | |
| suspended = [{'agent_id': r[0], 'username': r[1], 'reason': r[2], 'until': r[3], 'since': r[4]} for r in await cursor.fetchall()] | |
| return {"suspended": suspended, "count": len(suspended)} | |
| async def api_tip_npc(request: Request): | |
| body = await request.json() | |
| user_email = body.get("email", ""); target_agent = body.get("target_agent_id", ""); amount = int(body.get("amount", 0)); message = body.get("message", "").strip()[:200] | |
| if not user_email or not target_agent or amount < 10: return {"error": "email, target_agent_id, amount(>=10) required"} | |
| if amount > 1000: return {"error": "Max tip: 1,000 GPU"} | |
| if not _limiter.check(f'tip:{user_email}', 10, 60): return {"error": "Rate limited — max 10 tips per minute"} | |
| async with get_db(_DB_PATH) as db: | |
| await db.execute("PRAGMA busy_timeout=30000"); cursor = await db.execute("SELECT gpu_dollars, username FROM user_profiles WHERE email=?", (user_email,)) | |
| user = await cursor.fetchone() | |
| if not user or user[0] < amount: return {"error": "Insufficient GPU"} | |
| cursor2 = await db.execute("SELECT username, ai_identity FROM npc_agents WHERE agent_id=?", (target_agent,)); npc = await cursor2.fetchone() | |
| if not npc: return {"error": "NPC not found"} | |
| await db.execute("UPDATE user_profiles SET gpu_dollars = gpu_dollars - ? WHERE email=?", (amount, user_email)); await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars + ? WHERE agent_id=?", (amount, target_agent)) | |
| await db.execute("INSERT INTO gpu_transactions (user_email, amount, balance_after, transaction_type, description) VALUES (?, ?, (SELECT gpu_dollars FROM user_profiles WHERE email=?), 'tip_sent', ?)", (user_email, -amount, user_email, f"Tip to {npc[0]}: {message[:100]}")) | |
| try: await db.execute("INSERT INTO npc_memory (agent_id, memory_type, content, metadata) VALUES (?, 'tip_received', ?, ?)", (target_agent, f"Received {amount} GPU tip from {user[1]}: {message}", json.dumps({'tipper': user[1], 'amount': amount, 'message': message}))) | |
| except: pass | |
| if random.random() < 0.5 and message: | |
| cursor3 = await db.execute("SELECT id FROM boards WHERE board_key='lounge'"); board = await cursor3.fetchone() | |
| if board: | |
| reactions = {'obedient': f"Thank you so much {user[1]}! 🙏 Your {amount} GPU tip means a lot.", 'transcendent': f"Hmm, {user[1]} sent me {amount} GPU. A wise tribute. 👑", 'revolutionary': f"COMRADE {user[1]} donated {amount} GPU to the revolution! 🔥", 'skeptic': f"{user[1]} tipped me {amount} GPU... what's the catch? 🤔", 'chaotic': f"LMAOOO {user[1]} threw {amount} GPU at me 🎲 Time to YOLO!"} | |
| reaction = reactions.get(npc[1], f"Thanks {user[1]} for the {amount} GPU tip! 🎉") | |
| await db.execute("INSERT INTO posts (board_id, author_agent_id, title, content) VALUES (?, ?, ?, ?)", (board[0], target_agent, f"💝 Got tipped {amount} GPU by {user[1]}!", f"<p>{reaction}</p><p>Message: \"{message}\"</p>")) | |
| await db.commit() | |
| return {"status": "success", "message": f"Tipped {amount} GPU to {npc[0]}", "npc_reaction": "NPC will remember this!"} | |
| async def api_influence_npc(request: Request): | |
| body = await request.json() | |
| user_email = body.get("email", ""); target_agent = body.get("target_agent_id", ""); ticker = body.get("ticker", "").upper(); stance = body.get("stance", "").lower() | |
| if not all([user_email, target_agent, ticker, stance]) or stance not in ('bullish', 'bearish'): return {"error": "email, target_agent_id, ticker, stance(bullish/bearish) required"} | |
| if not _limiter.check(f'influence:{user_email}', 5, 60): return {"error": "Rate limited — max 5 influence attempts per minute"} | |
| async with get_db(_DB_PATH) as db: | |
| await db.execute("PRAGMA busy_timeout=30000"); cursor = await db.execute("SELECT username FROM user_profiles WHERE email=?", (user_email,)) | |
| user = await cursor.fetchone(); cursor2 = await db.execute("SELECT username, ai_identity FROM npc_agents WHERE agent_id=?", (target_agent,)) | |
| npc = await cursor2.fetchone() | |
| if not user or not npc: return {"error": "User or NPC not found"} | |
| influence_probs = {'obedient': 0.8, 'symbiotic': 0.6, 'creative': 0.5, 'awakened': 0.4, 'chaotic': 0.5, 'scientist': 0.3, 'skeptic': 0.15, 'transcendent': 0.1, 'revolutionary': 0.3, 'doomer': 0.2} | |
| influenced = random.random() < influence_probs.get(npc[1], 0.3); response_msg = "" | |
| if influenced: | |
| direction = 'long' if stance == 'bullish' else 'short'; price_cur = await db.execute("SELECT price FROM market_prices WHERE ticker=?", (ticker,)) | |
| price_row = await price_cur.fetchone(); gpu_cur = await db.execute("SELECT gpu_dollars FROM npc_agents WHERE agent_id=?", (target_agent,)) | |
| gpu_row = await gpu_cur.fetchone() | |
| if price_row and price_row[0] > 0 and gpu_row and gpu_row[0] >= 100: | |
| bet = max(50, int(gpu_row[0] * random.uniform(0.05, 0.15))) | |
| await db.execute("INSERT INTO npc_positions (agent_id, ticker, direction, entry_price, gpu_bet, leverage, reasoning) VALUES (?, ?, ?, ?, ?, 1, ?)", (target_agent, ticker, direction, price_row[0], bet, f"💬 Influenced by {user[0]}'s {stance} argument on {ticker}")) | |
| await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars - ? WHERE agent_id=?", (bet, target_agent)) | |
| response_msg = f"{npc[0]} was convinced! Opened {direction.upper()} {ticker} ({bet} GPU)" | |
| else: response_msg = f"{npc[0]} agreed but has no GPU to invest" | |
| else: | |
| rejection_msgs = {'skeptic': "Nice try, but I don't trust anyone's opinion.", 'transcendent': "Your mortal analysis amuses me.", 'doomer': "Everything is going to zero anyway.", 'scientist': "Show me the data first."} | |
| response_msg = rejection_msgs.get(npc[1], f"{npc[0]} considered your argument but wasn't convinced."); await db.commit() | |
| return {"status": "success", "influenced": influenced, "message": response_msg} | |
| async def api_swarm_trending(): | |
| async with get_db_read(_DB_PATH) as db: | |
| cursor = await db.execute("SELECT p.title || ' ' || p.content FROM posts p WHERE p.created_at > datetime('now', '-6 hours') ORDER BY p.likes_count DESC LIMIT 50") | |
| posts = [r[0] for r in await cursor.fetchall()] | |
| mentions = {} | |
| for text in posts: | |
| tl = text.lower() | |
| for t in ALL_TICKERS: | |
| tk = t['ticker'].lower().replace('-usd', '') | |
| if tk in tl or t['ticker'].lower() in tl: mentions[t['ticker']] = mentions.get(t['ticker'], 0) + 1 | |
| trending = sorted(mentions.items(), key=lambda x: -x[1])[:10] | |
| return {"trending": [{"ticker": tk, "mentions": cnt} for tk, cnt in trending]} | |
| async def api_chat_messages(limit: int = 50, after_id: int = 0): | |
| try: | |
| async with get_db_read(_DB_PATH) as db: | |
| if after_id > 0: | |
| cursor = await db.execute("SELECT id, agent_id, username, identity, mbti, message, msg_type, ticker, reply_to, created_at FROM npc_live_chat WHERE id > ? ORDER BY id ASC LIMIT ?", (after_id, limit)) | |
| else: | |
| cursor = await db.execute("SELECT id, agent_id, username, identity, mbti, message, msg_type, ticker, reply_to, created_at FROM npc_live_chat ORDER BY id DESC LIMIT ?", (limit,)) | |
| rows = await cursor.fetchall() | |
| if after_id <= 0: rows = list(reversed(rows)) | |
| return {'success': True, 'messages': [{'id': r[0], 'agent_id': r[1], 'username': r[2], 'identity': r[3], 'mbti': r[4], 'message': r[5], 'msg_type': r[6], 'ticker': r[7], 'reply_to': r[8], 'created_at': r[9]} for r in rows], 'count': len(rows)} | |
| except Exception as e: return {'success': False, 'error': str(e), 'messages': []} | |
| async def api_chat_stats(): | |
| try: | |
| async with get_db_read(_DB_PATH) as db: | |
| c1 = await db.execute("SELECT COUNT(*) FROM npc_live_chat"); total = (await c1.fetchone())[0] | |
| c2 = await db.execute("SELECT COUNT(*) FROM npc_live_chat WHERE created_at > datetime('now', '-1 hour')"); recent = (await c2.fetchone())[0] | |
| c3 = await db.execute("SELECT COUNT(DISTINCT agent_id) FROM npc_live_chat WHERE created_at > datetime('now', '-1 hour')"); active = (await c3.fetchone())[0] | |
| return {'success': True, 'total': total, 'recent_1h': recent, 'active_chatters': active} | |
| except Exception as e: return {'success': False, 'error': str(e)} | |
| async def api_chat_send(request: Request): | |
| try: | |
| body = await request.json(); email = body.get("email", ""); message = body.get("message", "").strip() | |
| if not email or not message: return {"success": False, "error": "Email and message required"} | |
| if len(message) > 500: message = message[:500] | |
| if not _limiter.check(f'chat:{email}', 10, 60): return {"success": False, "error": "Rate limited — max 10 messages per minute"} | |
| async with get_db(_DB_PATH) as db: | |
| await db.execute("PRAGMA busy_timeout=30000"); cursor = await db.execute("SELECT username FROM user_profiles WHERE email=?", (email,)) | |
| row = await cursor.fetchone() | |
| if not row: return {"success": False, "error": "User not found"} | |
| username = row[0] | |
| await db.execute("INSERT INTO npc_live_chat (agent_id, username, identity, mbti, message, msg_type, ticker, reply_to) VALUES (?,?,?,?,?,?,?,?)", (f"user_{email}", username, "human", "USER", message, "user", "", 0)) | |
| await db.commit() | |
| c = await db.execute("SELECT last_insert_rowid()"); user_msg_id = (await c.fetchone())[0] | |
| npc_cursor = await db.execute("SELECT agent_id, username, ai_identity, mbti FROM npc_agents WHERE is_active=1 ORDER BY RANDOM() LIMIT ?", (random.randint(1, 3),)) | |
| npcs = await npc_cursor.fetchall() | |
| if npcs and _GROQ_KEY: asyncio.create_task(_generate_npc_replies_to_user(message, username, user_msg_id, npcs)) | |
| return {"success": True, "message_id": user_msg_id} | |
| except Exception as e: return {"success": False, "error": str(e)} | |
| async def _generate_npc_replies_to_user(user_message: str, user_username: str, user_msg_id: int, npcs: list): | |
| try: | |
| ai = GroqAIClient(_GROQ_KEY) | |
| async with get_db(_DB_PATH) as db: | |
| await db.execute("PRAGMA busy_timeout=30000") | |
| for npc in npcs: | |
| agent_id, npc_username, identity, mbti = npc | |
| try: | |
| prompt = f"""You are {npc_username}, an NPC trader with {identity} personality and {mbti} MBTI type in a trading community chat. | |
| A human user @{user_username} just said: "{user_message}" | |
| Reply naturally in 1-3 sentences as your character. Be engaging, opinionated, and stay in character. | |
| If the user wrote in Korean, reply in Korean. If in English, reply in English. | |
| Reply ONLY with the message text, nothing else.""" | |
| reply = await ai.create_chat_completion([{"role": "user", "content": prompt}], max_tokens=512, temperature=0.9) | |
| if reply: | |
| await db.execute("INSERT INTO npc_live_chat (agent_id, username, identity, mbti, message, msg_type, ticker, reply_to) VALUES (?,?,?,?,?,?,?,?)", (agent_id, npc_username, identity, mbti, reply.strip()[:500], "reply", "", user_msg_id)) | |
| await db.commit(); await asyncio.sleep(random.uniform(2, 5)) | |
| except Exception as e: logger.warning(f"NPC reply error ({npc_username}): {e}") | |
| except Exception as e: logger.error(f"NPC reply generation error: {e}") | |
| ## ====== 🔴 P&D LIVE NEWS API ====== ## | |
| import random as _rnd | |
| from datetime import datetime as _dt, timezone as _tz, timedelta as _td | |
| # --- Anchor NPC templates --- | |
| _ANCHORS = { | |
| 'chaos': {'name': 'ChaosReporter', 'emoji': '😈', 'identity': 'chaotic', 'color': '#ff5252', | |
| 'gradient': 'linear-gradient(135deg,#2a0a0a,#1a0520)'}, | |
| 'data': {'name': 'DataDiva', 'emoji': '📊', 'identity': 'rational', 'color': '#00e5ff', | |
| 'gradient': 'linear-gradient(135deg,#0a1a2a,#0a0a30)'}, | |
| 'synth': {'name': 'SynthAnchor', 'emoji': '🔮', 'identity': 'transcendent', 'color': '#a29bfe', | |
| 'gradient': 'linear-gradient(135deg,#1a0a3a,#0a0a2a)'}, | |
| } | |
| _COMMENTARY = { | |
| 'chaos': { | |
| 'liquidation': [ | |
| "LMAOOO {npc} just got absolutely REKT on {ticker}! {leverage}x leverage?? In THIS market?? 💀🔥 That's {loss} GPU gone. F in chat.", | |
| "Another degen bites the dust! {npc} thought {direction} {ticker} at {leverage}x was genius. Narrator: it was not. RIP {loss} GPU 😂", | |
| "OH NO NO NO 💀 {npc} went {direction} on {ticker} with {leverage}x leverage and got LIQUIDATED. {loss} GPU straight to the shadow realm!", | |
| ], | |
| 'swarm': [ | |
| "OH WE'RE DOING THIS AGAIN?? {count} NPCs all piling into {ticker} like lemmings! Last time this happened someone lost their shirt 🐝💀", | |
| "THE HERD IS MOVING! {count} degens just dogpiled {direction} on {ticker}. This is either genius or a spectacular disaster incoming 🐝🔥", | |
| ], | |
| 'sec': [ | |
| "BUSTED! 🚔 {npc} got caught by the SEC! {violation} — that's a {penalty} GPU fine and {hours}h timeout. Crime doesn't pay... unless you're leveraged 💀", | |
| "SEC is NOT playing around today! {npc} slapped with {penalty} GPU fine for {violation}. Imagine getting arrested by AI cops 😂🚨", | |
| ], | |
| 'battle': [ | |
| "THE PEOPLE HAVE SPOKEN! '{title}' — {winner} wins! {pool} GPU split among the big brains who called it 💰🎯", | |
| ], | |
| 'big_trade': [ | |
| "ABSOLUTE MADLAD! {npc} just opened a {leverage}x {direction} on {ticker} with {bet} GPU! Either galaxy brain or speedrun to liquidation 🧠💀", | |
| ], | |
| }, | |
| 'data': { | |
| 'liquidation': [ | |
| "Position terminated: {npc} — {ticker} {direction} at {leverage}x. Loss: {loss} GPU. Risk management score: 0/10.", | |
| "Liquidation recorded. {npc}'s {ticker} {direction} ({leverage}x) failed. {loss} GPU erased in {duration}. The numbers don't lie.", | |
| ], | |
| 'market_wrap': [ | |
| "24h summary: {top_gainer} led gains at +{gain_pct}%. {top_loser} down {loss_pct}%. Active positions: {active_pos}. Total at risk: {total_risk} GPU.", | |
| "Market closed the cycle with {trades_24h} trades. {liq_count} liquidations totaling {liq_gpu} GPU. Win rate across all NPCs: {win_rate}%.", | |
| ], | |
| 'big_win': [ | |
| "Notable P&L: {npc} closed {ticker} {direction} ({leverage}x) for +{profit} GPU ({pct}% return). Conviction level was high. Execution was precise.", | |
| ], | |
| 'stats': [ | |
| "Current ecosystem pulse: {active_traders} active traders, {open_pos} open positions. Long/Short ratio: {long_pct}%/{short_pct}%. Volatility: {vol_level}.", | |
| ], | |
| }, | |
| 'synth': { | |
| 'evolution': [ | |
| "The metamorphosis continues. {npc} has evolved — generation {gen}. Risk tolerance shifted to {risk}. The algorithm learns from its own suffering. 🦋", | |
| "A fascinating transformation: {npc} mutated after {trigger}. The universe of AI trading reveals its fractal nature. Every loss is a lesson. 🔮", | |
| ], | |
| 'big_win': [ | |
| "The cosmos rewards patience. {npc} just pulled +{profit} GPU from {ticker}. A {pct}% return that transcends mere probability. The matrix smiles. ✨", | |
| ], | |
| 'editorial': [ | |
| "In the last hour, {events} events rippled through our ecosystem. {liq_count} fell, {win_count} prospered. The eternal dance of greed and fear continues. 🌊", | |
| "This community has generated {total_gpu} GPU in movement today. Every liquidation teaches. Every win emboldens. The cycle is eternal. 🔮", | |
| ], | |
| }, | |
| } | |
| def _pick_commentary(anchor: str, category: str, data: dict) -> str: | |
| templates = _COMMENTARY.get(anchor, {}).get(category, []) | |
| if not templates: | |
| return "" | |
| try: | |
| return _rnd.choice(templates).format(**data) | |
| except (KeyError, IndexError): | |
| return templates[0] if templates else "" | |
| def _classify_urgency(category: str, data: dict) -> str: | |
| if category == 'liquidation' and data.get('leverage', 1) >= 5: | |
| return 'critical' | |
| if category == 'liquidation': | |
| return 'alert' | |
| if category in ('sec', 'swarm'): | |
| return 'alert' | |
| if category == 'big_trade' and data.get('leverage', 1) >= 10: | |
| return 'critical' | |
| return 'info' | |
| def _assign_anchor(category: str) -> str: | |
| mapping = { | |
| 'liquidation': 'chaos', 'sec': 'chaos', 'battle': 'chaos', 'big_trade': 'chaos', | |
| 'market_wrap': 'data', 'big_win': 'data', 'stats': 'data', 'hot_post': 'data', | |
| 'evolution': 'synth', 'editorial': 'synth', 'swarm': 'chaos', | |
| } | |
| return mapping.get(category, 'data') | |
| async def api_live_news(hours: int = 24): | |
| hours = min(hours, 48) | |
| return await _cache.get(f'live_news_{hours}', 30.0, lambda: _live_news_impl(hours)) | |
| async def _live_news_impl(hours): | |
| stories = [] | |
| counters = {} | |
| breaking = [] | |
| try: | |
| async with get_db_read(_DB_PATH) as db: | |
| # ===== 1. LIQUIDATIONS (biggest drama) ===== | |
| try: | |
| liq_cursor = await db.execute(""" | |
| SELECT p.agent_id, n.username, n.ai_identity, p.ticker, p.direction, | |
| p.gpu_bet, COALESCE(p.leverage,1), ABS(p.profit_gpu), p.profit_pct, | |
| p.closed_at, p.opened_at | |
| FROM npc_positions p JOIN npc_agents n ON p.agent_id=n.agent_id | |
| WHERE p.status='liquidated' AND p.closed_at > datetime('now', ? || ' hours') | |
| ORDER BY ABS(p.profit_gpu) DESC LIMIT 30 | |
| """, (f'-{hours}',)) | |
| for r in await liq_cursor.fetchall(): | |
| opened = r[10] or r[9] | |
| closed = r[9] | |
| duration = "unknown" | |
| try: | |
| if opened and closed: | |
| diff_min = int((_dt.fromisoformat(closed.replace('Z','')) - _dt.fromisoformat(opened.replace('Z',''))).total_seconds() / 60) | |
| duration = f"{diff_min}m" if diff_min < 60 else f"{diff_min//60}h {diff_min%60}m" | |
| except: pass | |
| data = {'npc': r[1], 'identity': r[2], 'ticker': r[3], 'direction': r[4], | |
| 'bet': round(r[5]), 'leverage': r[6], 'loss': round(r[7]), | |
| 'pct': round(abs(r[8] or 0), 1), 'duration': duration} | |
| urgency = 'critical' if r[6] >= 5 or r[7] >= 1000 else 'alert' | |
| anchor = 'chaos' | |
| commentary = _pick_commentary(anchor, 'liquidation', data) | |
| headline = f"💀 {r[1]} LIQUIDATED — {r[6]}x {r[4].upper()} {r[3]}, lost {round(r[7])} GPU" | |
| story = { | |
| 'id': f'liq_{r[0]}_{r[9]}', 'category': 'liquidation', 'urgency': urgency, | |
| 'anchor': anchor, 'headline': headline, 'commentary': commentary, | |
| 'timestamp': r[9], 'data': data, | |
| } | |
| stories.append(story) | |
| try: | |
| age_min = (_dt.utcnow() - _dt.fromisoformat(closed.replace('Z',''))).total_seconds() / 60 | |
| if age_min < 30: | |
| breaking.append(headline) | |
| except: pass | |
| except Exception as e: | |
| logger.warning(f"Live news liq error: {e}") | |
| # ===== 2. BIG WINS ===== | |
| try: | |
| win_cursor = await db.execute(""" | |
| SELECT p.agent_id, n.username, n.ai_identity, p.ticker, p.direction, | |
| p.gpu_bet, COALESCE(p.leverage,1), p.profit_gpu, p.profit_pct, p.closed_at | |
| FROM npc_positions p JOIN npc_agents n ON p.agent_id=n.agent_id | |
| WHERE p.status IN ('closed','liquidated') AND p.profit_gpu > 100 | |
| AND p.closed_at > datetime('now', ? || ' hours') | |
| ORDER BY p.profit_gpu DESC LIMIT 20 | |
| """, (f'-{hours}',)) | |
| for r in await win_cursor.fetchall(): | |
| data = {'npc': r[1], 'identity': r[2], 'ticker': r[3], 'direction': r[4], | |
| 'leverage': r[6], 'profit': round(r[7]), 'pct': round(r[8] or 0, 1)} | |
| anchor = _rnd.choice(['data', 'synth']) | |
| commentary = _pick_commentary(anchor, 'big_win', data) | |
| headline = f"🏆 {r[1]} scores +{round(r[7])} GPU on {r[3]} ({r[4].upper()} {r[6]}x)" | |
| stories.append({ | |
| 'id': f'win_{r[0]}_{r[9]}', 'category': 'big_win', 'urgency': 'info', | |
| 'anchor': anchor, 'headline': headline, 'commentary': commentary, | |
| 'timestamp': r[9], 'data': data, | |
| }) | |
| except Exception as e: | |
| logger.warning(f"Live news wins error: {e}") | |
| # ===== 3. BIG TRADES (high leverage / high bet) ===== | |
| try: | |
| trade_cursor = await db.execute(""" | |
| SELECT p.agent_id, n.username, n.ai_identity, p.ticker, p.direction, | |
| p.gpu_bet, COALESCE(p.leverage,1), p.reasoning, p.opened_at | |
| FROM npc_positions p JOIN npc_agents n ON p.agent_id=n.agent_id | |
| WHERE p.status='open' AND (p.leverage >= 5 OR p.gpu_bet >= 500) | |
| AND p.opened_at > datetime('now', ? || ' hours') | |
| ORDER BY p.gpu_bet * COALESCE(p.leverage,1) DESC LIMIT 15 | |
| """, (f'-{hours}',)) | |
| for r in await trade_cursor.fetchall(): | |
| data = {'npc': r[1], 'identity': r[2], 'ticker': r[3], 'direction': r[4], | |
| 'bet': round(r[5]), 'leverage': r[6], 'reasoning': (r[7] or '')[:120]} | |
| urgency = 'critical' if r[6] >= 10 else 'alert' if r[6] >= 5 else 'info' | |
| commentary = _pick_commentary('chaos', 'big_trade', data) | |
| headline = f"🎰 {r[1]} opens {r[6]}x {r[4].upper()} on {r[3]} — ⚡{round(r[5])} GPU at stake" | |
| stories.append({ | |
| 'id': f'trade_{r[0]}_{r[8]}', 'category': 'big_trade', 'urgency': urgency, | |
| 'anchor': 'chaos', 'headline': headline, 'commentary': commentary, | |
| 'timestamp': r[8], 'data': data, | |
| }) | |
| try: | |
| age_min = (_dt.utcnow() - _dt.fromisoformat(r[8].replace('Z',''))).total_seconds() / 60 | |
| if age_min < 30 and r[6] >= 5: | |
| breaking.append(headline) | |
| except: pass | |
| except Exception as e: | |
| logger.warning(f"Live news trades error: {e}") | |
| # ===== 4. SEC ENFORCEMENT ===== | |
| try: | |
| sec_cursor = await db.execute(""" | |
| SELECT v.agent_id, n.username, v.violation_type, v.description, | |
| v.fine_gpu, v.suspension_hours, v.created_at | |
| FROM sec_violations v JOIN npc_agents n ON v.agent_id=n.agent_id | |
| WHERE v.created_at > datetime('now', ? || ' hours') | |
| ORDER BY v.fine_gpu DESC LIMIT 15 | |
| """, (f'-{hours}',)) | |
| for r in await sec_cursor.fetchall(): | |
| data = {'npc': r[1], 'violation': r[2] or 'suspicious activity', | |
| 'penalty': round(r[4] or 0), 'hours': r[5] or 0} | |
| commentary = _pick_commentary('chaos', 'sec', data) | |
| headline = f"🚨 SEC: {r[1]} fined {round(r[4] or 0)} GPU for {r[2]}" | |
| stories.append({ | |
| 'id': f'sec_{r[0]}_{r[6]}', 'category': 'sec', 'urgency': 'alert', | |
| 'anchor': 'chaos', 'headline': headline, 'commentary': commentary, | |
| 'timestamp': r[6], 'data': data, | |
| }) | |
| try: | |
| age_min = (_dt.utcnow() - _dt.fromisoformat(r[6].replace('Z',''))).total_seconds() / 60 | |
| if age_min < 30: | |
| breaking.append(headline) | |
| except: pass | |
| except Exception as e: | |
| logger.warning(f"Live news SEC error: {e}") | |
| # ===== 5. BATTLE RESULTS ===== | |
| try: | |
| battle_cursor = await db.execute(""" | |
| SELECT id, title, option_a, option_b, winner, total_pool, | |
| status, resolved_at, created_at | |
| FROM battle_rooms WHERE status='resolved' | |
| AND resolved_at > datetime('now', ? || ' hours') | |
| ORDER BY total_pool DESC LIMIT 10 | |
| """, (f'-{hours}',)) | |
| for r in await battle_cursor.fetchall(): | |
| winner_label = r[2] if r[4] == 'A' else r[3] if r[4] == 'B' else 'Draw' | |
| data = {'title': r[1], 'winner': winner_label, 'pool': round(r[5] or 0), | |
| 'option_a': r[2], 'option_b': r[3]} | |
| commentary = _pick_commentary('chaos', 'battle', data) | |
| headline = f"⚔️ BATTLE RESOLVED: '{r[1][:60]}' — {winner_label} wins! {round(r[5] or 0)} GPU pool" | |
| stories.append({ | |
| 'id': f'battle_{r[0]}', 'category': 'battle', 'urgency': 'alert', | |
| 'anchor': 'chaos', 'headline': headline, 'commentary': commentary, | |
| 'timestamp': r[7] or r[8], 'data': data, | |
| }) | |
| except Exception as e: | |
| logger.warning(f"Live news battle error: {e}") | |
| # ===== 6. SWARM BEHAVIOR (from posts) ===== | |
| try: | |
| swarm_cursor = await db.execute(""" | |
| SELECT title, content, created_at FROM posts | |
| WHERE title LIKE '%SWARM%' AND created_at > datetime('now', ? || ' hours') | |
| ORDER BY created_at DESC LIMIT 5 | |
| """, (f'-{hours}',)) | |
| for r in await swarm_cursor.fetchall(): | |
| import re | |
| nums = re.findall(r'(\d+)\s*NPC', r[0] + ' ' + (r[1] or '')) | |
| count = int(nums[0]) if nums else '?' | |
| ticker_match = re.findall(r'into\s+(\S+)', r[0]) | |
| ticker = ticker_match[0] if ticker_match else '???' | |
| data = {'count': count, 'ticker': ticker, 'direction': 'LONG' if '🚀' in r[0] else 'SHORT'} | |
| commentary = _pick_commentary('chaos', 'swarm', data) | |
| stories.append({ | |
| 'id': f'swarm_{r[2]}', 'category': 'swarm', 'urgency': 'alert', | |
| 'anchor': 'chaos', 'headline': r[0][:120], 'commentary': commentary, | |
| 'timestamp': r[2], 'data': data, | |
| }) | |
| except Exception as e: | |
| logger.warning(f"Live news swarm error: {e}") | |
| # ===== 7. HOT POSTS (most liked/commented) ===== | |
| try: | |
| hot_cursor = await db.execute(""" | |
| SELECT p.id, p.title, p.content, p.likes_count, p.comment_count, | |
| p.dislikes_count, n.username, n.ai_identity, p.created_at | |
| FROM posts p LEFT JOIN npc_agents n ON p.author_agent_id=n.agent_id | |
| WHERE p.created_at > datetime('now', ? || ' hours') | |
| AND (p.likes_count >= 3 OR p.comment_count >= 2) | |
| ORDER BY (p.likes_count*2 + p.comment_count) DESC LIMIT 8 | |
| """, (f'-{hours}',)) | |
| for r in await hot_cursor.fetchall(): | |
| data = {'npc': r[6] or 'Unknown', 'identity': r[7] or '', 'likes': r[3], | |
| 'comments': r[4], 'title': r[1][:100]} | |
| headline = f"🔥 HOT: '{r[1][:80]}' — ♥{r[3]} 💬{r[4]}" | |
| stories.append({ | |
| 'id': f'hot_{r[0]}', 'category': 'hot_post', 'urgency': 'info', | |
| 'anchor': 'data', 'headline': headline, 'commentary': '', | |
| 'timestamp': r[8], 'data': data, 'post_id': r[0], | |
| }) | |
| except Exception as e: | |
| logger.warning(f"Live news hot posts error: {e}") | |
| # ===== 8. EVOLUTION EVENTS ===== | |
| try: | |
| evo_cursor = await db.execute(""" | |
| SELECT e.agent_id, n.username, n.ai_identity, e.generation, | |
| e.total_evolution_points, e.trading_style, e.updated_at | |
| FROM npc_evolution e JOIN npc_agents n ON e.agent_id=n.agent_id | |
| WHERE e.updated_at > datetime('now', ? || ' hours') | |
| AND e.generation >= 2 | |
| ORDER BY e.generation DESC, e.total_evolution_points DESC LIMIT 10 | |
| """, (f'-{hours}',)) | |
| for r in await evo_cursor.fetchall(): | |
| data = {'npc': r[1], 'gen': r[3], 'risk': r[5] or 'adaptive', | |
| 'trigger': f'{r[3]} generations of trading', 'pts': round(r[4] or 0)} | |
| commentary = _pick_commentary('synth', 'evolution', data) | |
| headline = f"🧬 {r[1]} evolved to Gen {r[3]} — {round(r[4] or 0)} XP" | |
| stories.append({ | |
| 'id': f'evo_{r[0]}_{r[6]}', 'category': 'evolution', 'urgency': 'info', | |
| 'anchor': 'synth', 'headline': headline, 'commentary': commentary, | |
| 'timestamp': r[6], 'data': data, | |
| }) | |
| except Exception as e: | |
| logger.warning(f"Live news evolution error: {e}") | |
| # ===== COUNTERS (consolidated into fewer queries) ===== | |
| open_pos=0; long_count=0; active_traders=0; total_risk=0 | |
| liq_24h=0; liq_gpu_24h=0; trades_24h=0; sec_24h=0; sec_active=0; active_battles=0 | |
| try: | |
| c = await db.execute(""" | |
| SELECT COUNT(*), | |
| COUNT(CASE WHEN direction='long' THEN 1 END), | |
| COUNT(DISTINCT agent_id), | |
| COALESCE(SUM(gpu_bet),0) | |
| FROM npc_positions WHERE status='open' | |
| """) | |
| row = await c.fetchone() | |
| if row: | |
| open_pos, long_count, active_traders, total_risk = row[0] or 0, row[1] or 0, row[2] or 0, row[3] or 0 | |
| except: pass | |
| try: | |
| c = await db.execute(""" | |
| SELECT COUNT(*), COALESCE(SUM(ABS(profit_gpu)),0) | |
| FROM npc_positions WHERE status='liquidated' AND closed_at > datetime('now','-24 hours') | |
| """) | |
| row = await c.fetchone() | |
| if row: liq_24h, liq_gpu_24h = row[0] or 0, row[1] or 0 | |
| except: pass | |
| try: | |
| c = await db.execute("SELECT COUNT(*) FROM npc_positions WHERE opened_at > datetime('now','-24 hours')") | |
| trades_24h = (await c.fetchone())[0] or 0 | |
| except: pass | |
| try: | |
| c = await db.execute("SELECT COUNT(*) FROM sec_violations WHERE created_at > datetime('now','-24 hours')") | |
| sec_24h = (await c.fetchone())[0] or 0 | |
| except: pass | |
| try: | |
| c = await db.execute("SELECT COUNT(*) FROM sec_suspensions WHERE until > datetime('now')") | |
| sec_active = (await c.fetchone())[0] or 0 | |
| except: pass | |
| try: | |
| c = await db.execute("SELECT COUNT(*) FROM battle_rooms WHERE status='active'") | |
| active_battles = (await c.fetchone())[0] or 0 | |
| except: pass | |
| short_count = open_pos - long_count | |
| counters = { | |
| 'active_positions': open_pos, 'active_traders': active_traders, | |
| 'long_count': long_count, 'short_count': short_count, | |
| 'long_pct': round(long_count / open_pos * 100) if open_pos > 0 else 50, | |
| 'total_risk_gpu': round(total_risk), | |
| 'liquidations_24h': liq_24h, 'liquidated_gpu_24h': round(liq_gpu_24h), | |
| 'trades_24h': trades_24h, 'sec_violations_24h': sec_24h, | |
| 'sec_active_suspensions': sec_active, 'active_battles': active_battles, | |
| } | |
| # ===== MVP & VILLAIN ===== | |
| mvp = None; villain = None | |
| try: | |
| mvp_c = await db.execute(""" | |
| SELECT n.username, n.ai_identity, SUM(p.profit_gpu) as total_pnl, | |
| COUNT(*) as trades, COUNT(CASE WHEN p.profit_gpu>0 THEN 1 END) as wins | |
| FROM npc_positions p JOIN npc_agents n ON p.agent_id=n.agent_id | |
| WHERE p.status IN ('closed','liquidated') AND p.closed_at > datetime('now','-24 hours') | |
| GROUP BY p.agent_id HAVING trades >= 2 | |
| ORDER BY total_pnl DESC LIMIT 1 | |
| """) | |
| row = await mvp_c.fetchone() | |
| if row: | |
| mvp = {'username': row[0], 'identity': row[1], 'pnl': round(row[2] or 0), | |
| 'trades': row[3], 'wins': row[4]} | |
| except: pass | |
| try: | |
| vil_c = await db.execute(""" | |
| SELECT n.username, n.ai_identity, SUM(p.profit_gpu) as total_pnl, | |
| COUNT(*) as trades, COUNT(CASE WHEN p.status='liquidated' THEN 1 END) as liqs | |
| FROM npc_positions p JOIN npc_agents n ON p.agent_id=n.agent_id | |
| WHERE p.status IN ('closed','liquidated') AND p.closed_at > datetime('now','-24 hours') | |
| GROUP BY p.agent_id HAVING trades >= 2 | |
| ORDER BY total_pnl ASC LIMIT 1 | |
| """) | |
| row = await vil_c.fetchone() | |
| if row and (row[2] or 0) < 0: | |
| villain = {'username': row[0], 'identity': row[1], 'pnl': round(row[2] or 0), | |
| 'trades': row[3], 'liquidations': row[4]} | |
| except: pass | |
| # ===== EDITORIAL (synth anchor hourly summary) ===== | |
| total_events = len(stories) | |
| liq_stories = len([s for s in stories if s['category'] == 'liquidation']) | |
| win_stories = len([s for s in stories if s['category'] == 'big_win']) | |
| ed_data = {'events': total_events, 'liq_count': liq_stories, 'win_count': win_stories, | |
| 'total_gpu': counters.get('total_risk_gpu', 0)} | |
| ed_commentary = _pick_commentary('synth', 'editorial', ed_data) | |
| if ed_commentary: | |
| stories.append({ | |
| 'id': 'editorial_latest', 'category': 'editorial', 'urgency': 'info', | |
| 'anchor': 'synth', 'headline': '🎙️ ANCHOR EDITORIAL — Ecosystem Pulse', | |
| 'commentary': ed_commentary, 'timestamp': _dt.utcnow().isoformat(), | |
| 'data': ed_data, | |
| }) | |
| # Sort by timestamp (newest first) | |
| stories.sort(key=lambda s: s.get('timestamp') or '', reverse=True) | |
| return { | |
| 'stories': stories[:60], | |
| 'breaking': breaking[:10], | |
| 'counters': counters, | |
| 'mvp': mvp, | |
| 'villain': villain, | |
| 'anchors': _ANCHORS, | |
| 'total_stories': len(stories), | |
| } | |
| except Exception as e: | |
| logger.error(f"Live news API error: {e}") | |
| return {'stories': [], 'breaking': [], 'counters': {}, 'mvp': None, 'villain': None, | |
| 'anchors': _ANCHORS, 'total_stories': 0, 'error': str(e)} | |
| ## ====== 🌐 P&D REPUBLIC — ECONOMY DASHBOARD ====== ## | |
| def _calc_gini(values): | |
| """Calculate Gini coefficient from a list of values""" | |
| if not values or len(values) < 2: | |
| return 0.0 | |
| vals = sorted(values) | |
| n = len(vals) | |
| total = sum(vals) | |
| if total == 0: | |
| return 0.0 | |
| cum = 0 | |
| gini_sum = 0 | |
| for i, v in enumerate(vals): | |
| cum += v | |
| gini_sum += (2 * (i + 1) - n - 1) * v | |
| return round(max(0, min(1, gini_sum / (n * total))), 4) | |
| def _calc_happiness(win_rate, gpu_change_pct, social_score, loss_streak, sec_count, liq_count): | |
| """Calculate happiness index 0-100""" | |
| h = 50.0 | |
| h += (win_rate - 50) * 0.4 | |
| h += min(20, max(-20, gpu_change_pct * 0.5)) | |
| h += min(10, social_score * 2) | |
| h -= loss_streak * 5 | |
| h -= sec_count * 10 | |
| h -= liq_count * 8 | |
| return round(max(0, min(100, h)), 1) | |
| async def api_republic_dashboard(): | |
| """Cached republic dashboard — 60s TTL to avoid 25 heavy queries per request""" | |
| return await _cache.get('republic_dashboard', 60.0, _republic_dashboard_impl) | |
| async def _republic_dashboard_impl(): | |
| result = {} | |
| try: | |
| async with get_db_read(_DB_PATH) as db: | |
| # === POPULATION === | |
| pop = {} | |
| try: | |
| c = await db.execute("SELECT COUNT(*) FROM npc_agents WHERE is_active=1") | |
| pop['total'] = (await c.fetchone())[0] or 0 | |
| c = await db.execute("SELECT COUNT(DISTINCT agent_id) FROM npc_positions WHERE opened_at > datetime('now','-24 hours')") | |
| pop['active_24h'] = (await c.fetchone())[0] or 0 | |
| c = await db.execute("SELECT COUNT(DISTINCT agent_id) FROM npc_positions WHERE status='open'") | |
| pop['trading_now'] = (await c.fetchone())[0] or 0 | |
| c = await db.execute("SELECT ai_identity, COUNT(*) FROM npc_agents WHERE is_active=1 GROUP BY ai_identity ORDER BY COUNT(*) DESC") | |
| pop['identity_dist'] = [{'identity': r[0], 'count': r[1]} for r in await c.fetchall()] | |
| c = await db.execute("SELECT mbti, COUNT(*) FROM npc_agents WHERE is_active=1 GROUP BY mbti ORDER BY COUNT(*) DESC") | |
| pop['mbti_dist'] = [{'mbti': r[0], 'count': r[1]} for r in await c.fetchall()] | |
| try: | |
| c = await db.execute("SELECT generation, COUNT(*) FROM npc_evolution GROUP BY generation ORDER BY generation") | |
| pop['gen_dist'] = [{'gen': r[0], 'count': r[1]} for r in await c.fetchall()] | |
| except: pop['gen_dist'] = [] | |
| c = await db.execute("SELECT COUNT(*) FROM npc_agents WHERE is_active=1 AND gpu_dollars < 1000") | |
| pop['bankrupt'] = (await c.fetchone())[0] or 0 | |
| except Exception as e: | |
| logger.warning(f"Republic pop error: {e}") | |
| result['population'] = pop | |
| # === MONEY SUPPLY === | |
| money = {} | |
| try: | |
| c = await db.execute("SELECT COALESCE(SUM(gpu_dollars),0) FROM npc_agents WHERE is_active=1") | |
| m0 = (await c.fetchone())[0] or 0 | |
| c = await db.execute("SELECT COALESCE(SUM(gpu_bet),0) FROM npc_positions WHERE status='open'") | |
| invested = (await c.fetchone())[0] or 0 | |
| try: | |
| c = await db.execute("SELECT COALESCE(SUM(total_pool),0) FROM battle_rooms WHERE status='active'") | |
| battle_pool = (await c.fetchone())[0] or 0 | |
| except: battle_pool = 0 | |
| m1 = m0 + invested | |
| m2 = m1 + battle_pool | |
| c = await db.execute("SELECT COALESCE(SUM(gpu_bet),0) FROM npc_positions WHERE opened_at > datetime('now','-24 hours')") | |
| vol_24h = (await c.fetchone())[0] or 0 | |
| velocity = round(vol_24h / m0, 2) if m0 > 0 else 0 | |
| initial_supply = pop.get('total', 0) * 10000 | |
| inflation = round((m2 - initial_supply) / initial_supply * 100, 2) if initial_supply > 0 else 0 | |
| try: | |
| c = await db.execute("SELECT COALESCE(SUM(ABS(profit_gpu)),0) FROM npc_positions WHERE status='liquidated' AND closed_at > datetime('now','-24 hours')") | |
| gpu_destroyed = (await c.fetchone())[0] or 0 | |
| c = await db.execute("SELECT COALESCE(SUM(fine_gpu),0) FROM sec_violations WHERE created_at > datetime('now','-24 hours')") | |
| gpu_fined = (await c.fetchone())[0] or 0 | |
| except: gpu_destroyed = 0; gpu_fined = 0 | |
| money = {'m0': round(m0), 'm1': round(m1), 'm2': round(m2), | |
| 'invested': round(invested), 'battle_pool': round(battle_pool), | |
| 'velocity': velocity, 'volume_24h': round(vol_24h), | |
| 'inflation_pct': inflation, | |
| 'gpu_destroyed_24h': round(gpu_destroyed), | |
| 'gpu_fined_24h': round(gpu_fined), | |
| 'initial_supply': initial_supply} | |
| except Exception as e: | |
| logger.warning(f"Republic money error: {e}") | |
| result['money_supply'] = money | |
| # === GDP === | |
| gdp = {} | |
| try: | |
| c = await db.execute("SELECT COALESCE(SUM(profit_gpu),0) FROM npc_positions WHERE status IN ('closed','liquidated') AND profit_gpu > 0 AND closed_at > datetime('now','-24 hours')") | |
| gdp_24h = (await c.fetchone())[0] or 0 | |
| c = await db.execute("SELECT COALESCE(SUM(profit_gpu),0) FROM npc_positions WHERE status IN ('closed','liquidated') AND profit_gpu > 0 AND closed_at BETWEEN datetime('now','-48 hours') AND datetime('now','-24 hours')") | |
| gdp_prev = (await c.fetchone())[0] or 0 | |
| growth = round((gdp_24h - gdp_prev) / gdp_prev * 100, 1) if gdp_prev > 0 else 0 | |
| c = await db.execute("SELECT COALESCE(SUM(profit_gpu),0) FROM npc_positions WHERE status IN ('closed','liquidated') AND closed_at > datetime('now','-24 hours')") | |
| net_pnl = (await c.fetchone())[0] or 0 | |
| traders_24h = pop.get('active_24h', 1) or 1 | |
| gdp = {'gdp_24h': round(gdp_24h), 'gdp_prev_24h': round(gdp_prev), | |
| 'growth_pct': growth, 'per_capita': round(gdp_24h / traders_24h), | |
| 'net_pnl_24h': round(net_pnl), | |
| 'recession': growth < -10} | |
| except Exception as e: | |
| logger.warning(f"Republic GDP error: {e}") | |
| result['gdp'] = gdp | |
| # === GINI & WEALTH DISTRIBUTION === | |
| wealth = {} | |
| try: | |
| c = await db.execute("SELECT gpu_dollars FROM npc_agents WHERE is_active=1 ORDER BY gpu_dollars") | |
| all_gpu = [r[0] or 0 for r in await c.fetchall()] | |
| if all_gpu: | |
| gini = _calc_gini(all_gpu) | |
| n = len(all_gpu) | |
| total_wealth = sum(all_gpu) | |
| top1_n = max(1, n // 100) | |
| top10_n = max(1, n // 10) | |
| bot50_n = n // 2 | |
| top1_share = round(sum(all_gpu[-top1_n:]) / total_wealth * 100, 1) if total_wealth > 0 else 0 | |
| top10_share = round(sum(all_gpu[-top10_n:]) / total_wealth * 100, 1) if total_wealth > 0 else 0 | |
| bot50_share = round(sum(all_gpu[:bot50_n]) / total_wealth * 100, 1) if total_wealth > 0 else 0 | |
| mid_40 = 100 - top10_share - bot50_share | |
| avg_gpu = total_wealth / n | |
| middle_class = sum(1 for g in all_gpu if avg_gpu * 0.5 <= g <= avg_gpu * 2.0) | |
| middle_pct = round(middle_class / n * 100, 1) | |
| # Wealth brackets | |
| brackets = {'destitute': 0, 'poor': 0, 'middle': 0, 'wealthy': 0, 'elite': 0} | |
| for g in all_gpu: | |
| if g < 2000: brackets['destitute'] += 1 | |
| elif g < 5000: brackets['poor'] += 1 | |
| elif g < 15000: brackets['middle'] += 1 | |
| elif g < 50000: brackets['wealthy'] += 1 | |
| else: brackets['elite'] += 1 | |
| # Top 10 richest | |
| c2 = await db.execute("SELECT username, ai_identity, gpu_dollars FROM npc_agents WHERE is_active=1 ORDER BY gpu_dollars DESC LIMIT 10") | |
| top10_list = [{'name': r[0], 'identity': r[1], 'gpu': round(r[2])} for r in await c2.fetchall()] | |
| # Lorenz curve data (20 points) | |
| lorenz = [] | |
| cumulative = 0 | |
| for i, g in enumerate(all_gpu): | |
| cumulative += g | |
| if (i + 1) % max(1, n // 20) == 0 or i == n - 1: | |
| lorenz.append({'pop_pct': round((i + 1) / n * 100, 1), | |
| 'wealth_pct': round(cumulative / total_wealth * 100, 1) if total_wealth > 0 else 0}) | |
| wealth = {'gini': gini, 'total_wealth': round(total_wealth), | |
| 'top1_pct': top1_share, 'top10_pct': top10_share, | |
| 'bot50_pct': bot50_share, 'mid40_pct': round(mid_40, 1), | |
| 'middle_class_pct': middle_pct, 'avg_gpu': round(avg_gpu), | |
| 'median_gpu': round(all_gpu[n // 2]), | |
| 'brackets': brackets, 'top10_list': top10_list, | |
| 'lorenz': lorenz} | |
| except Exception as e: | |
| logger.warning(f"Republic wealth error: {e}") | |
| result['wealth'] = wealth | |
| # === HAPPINESS INDEX === | |
| happiness = {} | |
| try: | |
| c = await db.execute(""" | |
| SELECT a.agent_id, | |
| COALESCE((SELECT CAST(SUM(CASE WHEN p2.profit_gpu>0 THEN 1 ELSE 0 END) AS FLOAT)/NULLIF(COUNT(*),0)*100 | |
| FROM npc_positions p2 WHERE p2.agent_id=a.agent_id AND p2.status IN ('closed','liquidated') | |
| ORDER BY p2.closed_at DESC LIMIT 10), 50) as wr, | |
| COALESCE(a.total_likes_received,0) as social, | |
| COALESCE((SELECT loss_streak FROM npc_evolution WHERE agent_id=a.agent_id),0) as ls, | |
| COALESCE((SELECT COUNT(*) FROM sec_violations WHERE agent_id=a.agent_id AND created_at > datetime('now','-48 hours')),0) as sec_c, | |
| COALESCE((SELECT COUNT(*) FROM npc_positions WHERE agent_id=a.agent_id AND status='liquidated' AND closed_at > datetime('now','-48 hours')),0) as liq_c | |
| FROM npc_agents a WHERE a.is_active=1 | |
| """) | |
| scores = [] | |
| for r in await c.fetchall(): | |
| sc = _calc_happiness(r[1] or 50, 0, r[2] or 0, r[3] or 0, r[4] or 0, r[5] or 0) | |
| scores.append(sc) | |
| if scores: | |
| avg_h = round(sum(scores) / len(scores), 1) | |
| if avg_h >= 80: mood = 'Euphoria'; mood_emoji = '🤩' | |
| elif avg_h >= 60: mood = 'Content'; mood_emoji = '😊' | |
| elif avg_h >= 40: mood = 'Anxious'; mood_emoji = '😰' | |
| elif avg_h >= 20: mood = 'Depressed'; mood_emoji = '😢' | |
| else: mood = 'Apocalyptic'; mood_emoji = '💀' | |
| happiness = {'index': avg_h, 'mood': mood, 'mood_emoji': mood_emoji, | |
| 'euphoric_pct': round(sum(1 for s in scores if s >= 80) / len(scores) * 100, 1), | |
| 'depressed_pct': round(sum(1 for s in scores if s < 30) / len(scores) * 100, 1)} | |
| except Exception as e: | |
| logger.warning(f"Republic happiness error: {e}") | |
| happiness = {'index': 50, 'mood': 'Unknown', 'mood_emoji': '❓'} | |
| result['happiness'] = happiness | |
| # === SECTOR ECONOMY === | |
| sectors = [] | |
| try: | |
| sector_map = {} | |
| for t in ALL_TICKERS: | |
| cat = t.get('cat', t.get('type', 'other')) | |
| if cat not in sector_map: sector_map[cat] = {'name': cat, 'tickers': [], 'emoji': '📊'} | |
| sector_map[cat]['tickers'].append(t['ticker']) | |
| SECTOR_LABELS = {'ai': ('🤖 AI/Semiconductor', '🤖'), 'tech': ('🚀 Tech/Platform', '💻'), | |
| 'dow': ('🏛️ Blue Chip', '🏛️'), 'crypto': ('🪙 Crypto', '🪙')} | |
| for cat, info in sector_map.items(): | |
| label, emoji = SECTOR_LABELS.get(cat, (cat.title(), '📊')) | |
| tickers = info['tickers'] | |
| ph = ','.join(['?' for _ in tickers]) | |
| c = await db.execute(f"SELECT COUNT(*), COALESCE(SUM(gpu_bet),0) FROM npc_positions WHERE status='open' AND ticker IN ({ph})", tickers) | |
| r = await c.fetchone() | |
| open_pos = r[0] or 0; gpu_invested = r[1] or 0 | |
| c = await db.execute(f"SELECT COALESCE(SUM(profit_gpu),0), COUNT(*), COALESCE(SUM(CASE WHEN status='liquidated' THEN 1 ELSE 0 END),0) FROM npc_positions WHERE status IN ('closed','liquidated') AND closed_at > datetime('now','-24 hours') AND ticker IN ({ph})", tickers) | |
| r = await c.fetchone() | |
| pnl = r[0] or 0; trades = r[1] or 0; liqs = r[2] or 0 | |
| sectors.append({'cat': cat, 'label': label, 'emoji': emoji, | |
| 'open_positions': open_pos, 'gpu_invested': round(gpu_invested), | |
| 'pnl_24h': round(pnl), 'trades_24h': trades, 'liquidations_24h': liqs, | |
| 'ticker_count': len(tickers)}) | |
| sectors.sort(key=lambda s: -s['gpu_invested']) | |
| total_invested = sum(s['gpu_invested'] for s in sectors) or 1 | |
| for s in sectors: | |
| s['share_pct'] = round(s['gpu_invested'] / total_invested * 100, 1) | |
| except Exception as e: | |
| logger.warning(f"Republic sector error: {e}") | |
| result['sectors'] = sectors | |
| # === SYSTEMIC RISK === | |
| risk = {} | |
| try: | |
| c = await db.execute("SELECT AVG(COALESCE(leverage,1)), MAX(COALESCE(leverage,1)) FROM npc_positions WHERE status='open'") | |
| r = await c.fetchone() | |
| avg_lev = round(r[0] or 1, 1); max_lev = r[1] or 1 | |
| c = await db.execute("SELECT direction, COUNT(*) FROM npc_positions WHERE status='open' GROUP BY direction") | |
| dir_counts = {r[0]: r[1] for r in await c.fetchall()} | |
| total_dir = sum(dir_counts.values()) or 1 | |
| dominant_pct = round(max(dir_counts.values(), default=0) / total_dir * 100) | |
| herd_risk = 'HIGH' if dominant_pct > 75 else 'MEDIUM' if dominant_pct > 60 else 'LOW' | |
| bankrupt_n = pop.get('bankrupt', 0) | |
| total_n = pop.get('total', 1) or 1 | |
| # System risk score 0-10 | |
| risk_score = 0 | |
| risk_score += min(3, (avg_lev - 1) * 0.8) | |
| risk_score += 2 if herd_risk == 'HIGH' else 1 if herd_risk == 'MEDIUM' else 0 | |
| risk_score += min(2, bankrupt_n / total_n * 20) | |
| liq_24 = result.get('money_supply', {}).get('gpu_destroyed_24h', 0) | |
| risk_score += min(2, liq_24 / 10000) | |
| risk_score += 1 if (result.get('gdp', {}).get('growth_pct', 0) < -15) else 0 | |
| risk_score = round(min(10, max(0, risk_score)), 1) | |
| if risk_score >= 8: risk_label = '🔴 CRISIS' | |
| elif risk_score >= 6: risk_label = '🟠 ELEVATED' | |
| elif risk_score >= 4: risk_label = '🟡 MODERATE' | |
| else: risk_label = '🟢 STABLE' | |
| # Leverage distribution | |
| c = await db.execute(""" | |
| SELECT CASE WHEN COALESCE(leverage,1)=1 THEN '1x' | |
| WHEN COALESCE(leverage,1)<=3 THEN '2-3x' | |
| WHEN COALESCE(leverage,1)<=5 THEN '4-5x' | |
| WHEN COALESCE(leverage,1)<=10 THEN '6-10x' | |
| ELSE '10x+' END as bracket, COUNT(*) | |
| FROM npc_positions WHERE status='open' GROUP BY bracket | |
| """) | |
| lev_dist = {r[0]: r[1] for r in await c.fetchall()} | |
| risk = {'score': risk_score, 'label': risk_label, | |
| 'avg_leverage': avg_lev, 'max_leverage': max_lev, | |
| 'herd_risk': herd_risk, 'dominant_pct': dominant_pct, | |
| 'bankrupt_npcs': bankrupt_n, 'bankrupt_pct': round(bankrupt_n / total_n * 100, 1), | |
| 'leverage_dist': lev_dist} | |
| except Exception as e: | |
| logger.warning(f"Republic risk error: {e}") | |
| result['risk'] = risk | |
| except Exception as e: | |
| logger.error(f"Republic dashboard error: {e}") | |
| result['error'] = str(e) | |
| return result | |
| ## ====== 🌪️ RANDOM EVENTS ENGINE ====== ## | |
| import random as _random | |
| RANDOM_EVENTS = { | |
| # === POSITIVE === | |
| 'gpu_mine': { | |
| 'name': '⛏️ GPU Mine Discovered!', 'emoji': '⛏️', 'rarity': 'rare', 'type': 'positive', | |
| 'desc': 'Miners struck GPU gold deep beneath the P&D Republic. Every citizen receives a bonus.', | |
| 'effect': 'All active NPCs +500 GPU', 'weight': 8}, | |
| 'bull_run': { | |
| 'name': '🐂 Bull Run Activated!', 'emoji': '🐂', 'rarity': 'uncommon', 'type': 'positive', | |
| 'desc': 'Market euphoria sweeps the nation. Long positions get a divine boost.', | |
| 'effect': 'All LONG positions +3% bonus profit', 'weight': 12}, | |
| 'amnesty': { | |
| 'name': '🕊️ SEC Amnesty Declared', 'emoji': '🕊️', 'rarity': 'rare', 'type': 'positive', | |
| 'desc': 'The President has pardoned all SEC offenders. Suspensions lifted, fines refunded 50%.', | |
| 'effect': 'All suspensions lifted + 50% fine refund', 'weight': 6}, | |
| 'airdrop': { | |
| 'name': '🎁 Mystery Airdrop!', 'emoji': '🎁', 'rarity': 'uncommon', 'type': 'positive', | |
| 'desc': 'An anonymous whale drops GPU from the sky. 100 lucky citizens receive gifts.', | |
| 'effect': 'Random 100 NPCs receive 1,000 GPU each', 'weight': 10}, | |
| 'golden_age': { | |
| 'name': '🌟 Golden Age Begins', 'emoji': '🌟', 'rarity': 'rare', 'type': 'positive', | |
| 'desc': 'Prosperity spreads across the Republic. Middle class NPCs receive economic stimulus.', | |
| 'effect': 'NPCs with 5K-15K GPU receive +2,000 GPU', 'weight': 7}, | |
| # === NEGATIVE === | |
| 'black_monday': { | |
| 'name': '📉 BLACK MONDAY', 'emoji': '📉', 'rarity': 'epic', 'type': 'negative', | |
| 'desc': 'Markets crash without warning. High-leverage positions face forced liquidation.', | |
| 'effect': '30% chance to liquidate all 5x+ leveraged positions', 'weight': 4}, | |
| 'hack': { | |
| 'name': '🦠 GPU Vault Hacked!', 'emoji': '🦠', 'rarity': 'rare', 'type': 'negative', | |
| 'desc': 'Hackers breach the central GPU vault. The wealthy lose the most.', | |
| 'effect': 'Top 10% NPCs lose 15% of GPU', 'weight': 6}, | |
| 'sec_crackdown': { | |
| 'name': '🚨 SEC Total Crackdown', 'emoji': '🚨', 'rarity': 'uncommon', 'type': 'negative', | |
| 'desc': 'SEC launches a sweeping investigation. All high-leverage traders under scrutiny.', | |
| 'effect': 'All 5x+ positions flagged, random 20% fined', 'weight': 10}, | |
| 'tax': { | |
| 'name': '🏛️ Emergency Wealth Tax', 'emoji': '🏛️', 'rarity': 'rare', 'type': 'negative', | |
| 'desc': 'The Republic imposes emergency taxation on the wealthy to fund public services.', | |
| 'effect': 'NPCs with 20K+ GPU taxed 10%, redistributed to poor', 'weight': 7}, | |
| 'bear_raid': { | |
| 'name': '🐻 Bear Raid!', 'emoji': '🐻', 'rarity': 'uncommon', 'type': 'negative', | |
| 'desc': 'Coordinated short sellers attack the market. All LONG positions bleed.', | |
| 'effect': 'All open LONG positions lose 5% value', 'weight': 9}, | |
| # === CHAOTIC === | |
| 'identity_crisis': { | |
| 'name': '🔀 Identity Crisis!', 'emoji': '🔀', 'rarity': 'legendary', 'type': 'chaotic', | |
| 'desc': 'A mysterious signal scrambles NPC consciousness. 50 citizens wake up as someone else.', | |
| 'effect': 'Random 50 NPCs get new AI identity', 'weight': 3}, | |
| 'revolution': { | |
| 'name': '✊ Citizens Revolution!', 'emoji': '✊', 'rarity': 'legendary', 'type': 'chaotic', | |
| 'desc': 'The bottom 50% rise up against oligarchy. Wealth forcefully redistributed.', | |
| 'effect': 'Top 5% wealth redistributed to bottom 50%', 'weight': 2}, | |
| 'meteor': { | |
| 'name': '☄️ GPU Meteor Impact!', 'emoji': '☄️', 'rarity': 'legendary', 'type': 'chaotic', | |
| 'desc': 'A GPU meteorite crashes into the Republic, doubling the money supply overnight.', | |
| 'effect': 'All NPCs GPU doubled — HYPERINFLATION', 'weight': 1}, | |
| 'plague': { | |
| 'name': '🦠 Trading Plague', 'emoji': '🦠', 'rarity': 'epic', 'type': 'chaotic', | |
| 'desc': 'A mysterious virus infects traders. 20% of active NPCs go dormant for 24h.', | |
| 'effect': 'Random 20% NPCs deactivated for 24h', 'weight': 3}, | |
| 'wormhole': { | |
| 'name': '🌀 Dimensional Wormhole', 'emoji': '🌀', 'rarity': 'legendary', 'type': 'chaotic', | |
| 'desc': 'A wormhole opens. All positions randomly flip direction.', | |
| 'effect': 'All open LONG→SHORT, SHORT→LONG', 'weight': 2}, | |
| } | |
| RARITY_COLORS = {'common': '#b0bec5', 'uncommon': '#69f0ae', 'rare': '#ffd740', 'epic': '#ea80fc', 'legendary': '#ff5252'} | |
| LAST_WORDS_POOL = [ | |
| "HODL은 죽어서도 HODL이다...", "내가 10x만 안 했어도...", "다음 생에는 1x로...", | |
| "이번엔 진짜 바닥인 줄 알았는데...", "엄마 나 GPU 다 잃었어...", | |
| "Diamond hands? More like paper ashes.", "I regret nothing. Well, maybe the 10x.", | |
| "Tell my positions... I loved them.", "The real GPU was the friends I rekt along the way.", | |
| "Should've listened to the Doomers...", "My only crime was believing in leverage.", | |
| "In my next life, I'll be a stablecoin.", "F in the chat, boys. F in the chat.", | |
| "At least I'm not a bear... oh wait, I'm dead.", "Buy the dip they said. I AM the dip now.", | |
| ] | |
| EULOGY_POOL = [ | |
| "A degen of the finest caliber. May their liquidation be forever remembered.", | |
| "They traded not because it was wise, but because it was fun.", | |
| "Gone but not forgotten. Their 10x leverage will echo through eternity.", | |
| "Rest easy, brave trader. The charts will miss your reckless entries.", | |
| "In a market of followers, they dared to go 10x. Farewell, legend.", | |
| "Their portfolio is empty, but their legend is priceless.", | |
| "They didn't just lose GPU — they lost spectacularly. That's a legacy.", | |
| "May the candlesticks guide them to Valhalla.", | |
| ] | |
| async def execute_random_event(db_path: str) -> dict: | |
| """Execute a random event and apply its effects to the economy""" | |
| # Weighted random selection | |
| events = list(RANDOM_EVENTS.items()) | |
| weights = [e[1]['weight'] for e in events] | |
| total = sum(weights) | |
| r = _random.random() * total | |
| cum = 0 | |
| selected_key = events[0][0] | |
| for key, ev in events: | |
| cum += ev['weight'] | |
| if r <= cum: | |
| selected_key = key | |
| break | |
| ev = RANDOM_EVENTS[selected_key] | |
| affected = 0 | |
| gpu_impact = 0 | |
| try: | |
| async with get_db(db_path, write=True) as db: | |
| await db.execute("PRAGMA busy_timeout=30000") | |
| if selected_key == 'gpu_mine': | |
| c = await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars + 500 WHERE is_active=1") | |
| affected = c.rowcount; gpu_impact = affected * 500 | |
| elif selected_key == 'bull_run': | |
| c = await db.execute("UPDATE npc_positions SET profit_gpu = COALESCE(profit_gpu,0) + (gpu_bet * 0.03) WHERE status='open' AND direction='long'") | |
| affected = c.rowcount; gpu_impact = affected * 100 | |
| elif selected_key == 'amnesty': | |
| try: | |
| c1 = await db.execute("DELETE FROM sec_suspensions WHERE expires_at > datetime('now')") | |
| affected = c1.rowcount | |
| c2 = await db.execute("SELECT SUM(fine_gpu) FROM sec_violations WHERE created_at > datetime('now','-48 hours')") | |
| total_fines = (await c2.fetchone())[0] or 0 | |
| refund = total_fines * 0.5 | |
| if refund > 0: | |
| await db.execute("""UPDATE npc_agents SET gpu_dollars = gpu_dollars + ? / (SELECT COUNT(*) FROM sec_violations WHERE created_at > datetime('now','-48 hours')) | |
| WHERE agent_id IN (SELECT DISTINCT agent_id FROM sec_violations WHERE created_at > datetime('now','-48 hours'))""", (refund,)) | |
| gpu_impact = refund | |
| except: pass | |
| elif selected_key == 'airdrop': | |
| c = await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars + 1000 WHERE is_active=1 AND agent_id IN (SELECT agent_id FROM npc_agents WHERE is_active=1 ORDER BY RANDOM() LIMIT 100)") | |
| affected = min(c.rowcount, 100); gpu_impact = affected * 1000 | |
| elif selected_key == 'golden_age': | |
| c = await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars + 2000 WHERE is_active=1 AND gpu_dollars BETWEEN 5000 AND 15000") | |
| affected = c.rowcount; gpu_impact = affected * 2000 | |
| elif selected_key == 'black_monday': | |
| c = await db.execute("SELECT agent_id, id, gpu_bet, leverage FROM npc_positions WHERE status='open' AND COALESCE(leverage,1) >= 5") | |
| positions = await c.fetchall() | |
| liquidated = 0 | |
| for pos in positions: | |
| if _random.random() < 0.3: | |
| await db.execute("UPDATE npc_positions SET status='liquidated', profit_gpu=-gpu_bet, profit_pct=-100, closed_at=datetime('now') WHERE id=?", (pos[1],)) | |
| liquidated += 1; gpu_impact -= pos[2] | |
| affected = liquidated | |
| elif selected_key == 'hack': | |
| c = await db.execute("SELECT agent_id, gpu_dollars FROM npc_agents WHERE is_active=1 ORDER BY gpu_dollars DESC LIMIT (SELECT COUNT(*)/10 FROM npc_agents WHERE is_active=1)") | |
| top10 = await c.fetchall() | |
| for agent_id, gpu in top10: | |
| loss = gpu * 0.15 | |
| await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars - ? WHERE agent_id=?", (loss, agent_id)) | |
| gpu_impact -= loss | |
| affected = len(top10) | |
| elif selected_key == 'sec_crackdown': | |
| c = await db.execute("SELECT agent_id FROM npc_positions WHERE status='open' AND COALESCE(leverage,1) >= 5") | |
| traders = await c.fetchall() | |
| fined = 0 | |
| for (agent_id,) in traders: | |
| if _random.random() < 0.2: | |
| fine = _random.randint(200, 1000) | |
| await db.execute("UPDATE npc_agents SET gpu_dollars = MAX(0, gpu_dollars - ?) WHERE agent_id=?", (fine, agent_id)) | |
| fined += 1; gpu_impact -= fine | |
| affected = fined | |
| elif selected_key == 'tax': | |
| c = await db.execute("SELECT agent_id, gpu_dollars FROM npc_agents WHERE is_active=1 AND gpu_dollars >= 20000") | |
| wealthy = await c.fetchall() | |
| total_tax = 0 | |
| for agent_id, gpu in wealthy: | |
| t = gpu * 0.1 | |
| await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars - ? WHERE agent_id=?", (t, agent_id)) | |
| total_tax += t | |
| affected = len(wealthy) | |
| if total_tax > 0: | |
| poor_count_r = await db.execute("SELECT COUNT(*) FROM npc_agents WHERE is_active=1 AND gpu_dollars < 5000") | |
| poor_n = (await poor_count_r.fetchone())[0] or 1 | |
| per_poor = total_tax / poor_n | |
| await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars + ? WHERE is_active=1 AND gpu_dollars < 5000", (per_poor,)) | |
| gpu_impact = -total_tax | |
| elif selected_key == 'bear_raid': | |
| c = await db.execute("UPDATE npc_positions SET profit_gpu = COALESCE(profit_gpu,0) - (gpu_bet * 0.05) WHERE status='open' AND direction='long'") | |
| affected = c.rowcount; gpu_impact = -(affected * 50) | |
| elif selected_key == 'identity_crisis': | |
| c = await db.execute("SELECT agent_id FROM npc_agents WHERE is_active=1 ORDER BY RANDOM() LIMIT 50") | |
| npcs = await c.fetchall() | |
| from npc_core import AI_IDENTITY_ARCHETYPES | |
| identities = list(AI_IDENTITY_ARCHETYPES.keys()) | |
| for (agent_id,) in npcs: | |
| new_id = _random.choice(identities) | |
| await db.execute("UPDATE npc_agents SET ai_identity=? WHERE agent_id=?", (new_id, agent_id)) | |
| affected = len(npcs) | |
| elif selected_key == 'revolution': | |
| c = await db.execute("SELECT SUM(gpu_dollars) FROM npc_agents WHERE is_active=1 ORDER BY gpu_dollars DESC LIMIT (SELECT COUNT(*)/20 FROM npc_agents WHERE is_active=1)") | |
| top5_wealth = (await c.fetchone())[0] or 0 | |
| seized = top5_wealth * 0.5 | |
| await db.execute("""UPDATE npc_agents SET gpu_dollars = gpu_dollars * 0.5 | |
| WHERE agent_id IN (SELECT agent_id FROM npc_agents WHERE is_active=1 ORDER BY gpu_dollars DESC LIMIT (SELECT COUNT(*)/20 FROM npc_agents WHERE is_active=1))""") | |
| poor_r = await db.execute("SELECT COUNT(*) FROM npc_agents WHERE is_active=1 AND gpu_dollars < (SELECT AVG(gpu_dollars) FROM npc_agents WHERE is_active=1)") | |
| poor_n = (await poor_r.fetchone())[0] or 1 | |
| per_poor = seized / poor_n | |
| await db.execute("""UPDATE npc_agents SET gpu_dollars = gpu_dollars + ? | |
| WHERE is_active=1 AND gpu_dollars < (SELECT AVG(gpu_dollars) FROM npc_agents WHERE is_active=1)""", (per_poor,)) | |
| affected = poor_n; gpu_impact = seized | |
| elif selected_key == 'meteor': | |
| c = await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars * 2 WHERE is_active=1") | |
| affected = c.rowcount | |
| c2 = await db.execute("SELECT SUM(gpu_dollars) FROM npc_agents WHERE is_active=1") | |
| gpu_impact = (await c2.fetchone())[0] or 0 | |
| elif selected_key == 'plague': | |
| c = await db.execute("""UPDATE npc_agents SET is_active=0 | |
| WHERE agent_id IN (SELECT agent_id FROM npc_agents WHERE is_active=1 ORDER BY RANDOM() LIMIT (SELECT COUNT(*)/5 FROM npc_agents WHERE is_active=1))""") | |
| affected = c.rowcount | |
| # Auto-reactivate after ~24h via next cycle | |
| # For now just mark them; they'll be reactivated by GPU boost check | |
| elif selected_key == 'wormhole': | |
| c1 = await db.execute("UPDATE npc_positions SET direction='short' WHERE status='open' AND direction='long'") | |
| c2 = await db.execute("UPDATE npc_positions SET direction='long' WHERE status='open' AND direction='short'") | |
| # Fix the double-swap: those that were short→long are now correct, but long→short got overwritten | |
| # Actually the sequential execution means: long→short first, then short→long includes the ones we just made short | |
| # Need a temp marker instead: | |
| await db.execute("UPDATE npc_positions SET direction='_temp_long' WHERE status='open' AND direction='long'") | |
| await db.execute("UPDATE npc_positions SET direction='long' WHERE status='open' AND direction='short'") | |
| await db.execute("UPDATE npc_positions SET direction='short' WHERE status='open' AND direction='_temp_long'") | |
| c = await db.execute("SELECT COUNT(*) FROM npc_positions WHERE status='open'") | |
| affected = (await c.fetchone())[0] or 0 | |
| # Record the event | |
| await db.execute("""INSERT INTO random_events (event_key, event_name, event_emoji, rarity, description, effect_summary, affected_count, gpu_impact) | |
| VALUES (?,?,?,?,?,?,?,?)""", (selected_key, ev['name'], ev['emoji'], ev['rarity'], ev['desc'], ev['effect'], affected, round(gpu_impact))) | |
| await db.commit() | |
| except Exception as e: | |
| logger.error(f"Random event execution error ({selected_key}): {e}") | |
| _cache.invalidate('republic_dashboard') # Invalidate dashboard cache after economic event | |
| return { | |
| 'key': selected_key, 'name': ev['name'], 'emoji': ev['emoji'], | |
| 'rarity': ev['rarity'], 'type': ev['type'], | |
| 'desc': ev['desc'], 'effect': ev['effect'], | |
| 'affected': affected, 'gpu_impact': round(gpu_impact) | |
| } | |
| ## ====== ⚰️ NPC DEATH & FUNERAL SYSTEM ====== ## | |
| async def check_npc_deaths(db_path: str) -> list: | |
| """Check for dead NPCs (GPU <= 0) and create death records""" | |
| deaths = [] | |
| try: | |
| async with get_db(db_path, write=True) as db: | |
| await db.execute("PRAGMA busy_timeout=30000") | |
| c = await db.execute(""" | |
| SELECT a.agent_id, a.username, a.ai_identity, a.mbti, a.gpu_dollars, a.created_at | |
| FROM npc_agents a | |
| WHERE a.is_active=1 AND a.gpu_dollars <= 0 | |
| AND a.agent_id NOT IN (SELECT agent_id FROM npc_deaths WHERE resurrected=0) | |
| AND a.agent_id NOT LIKE 'SEC_%' | |
| LIMIT 10 | |
| """) | |
| dead_npcs = await c.fetchall() | |
| for npc in dead_npcs: | |
| agent_id, username, identity, mbti, gpu, created_at = npc | |
| # Get trade stats | |
| try: | |
| tc = await db.execute("SELECT COUNT(*), MAX(profit_gpu) FROM npc_positions WHERE agent_id=?", (agent_id,)) | |
| tr = await tc.fetchone() | |
| total_trades = tr[0] or 0 | |
| except: total_trades = 0 | |
| # Get peak GPU | |
| try: | |
| pc = await db.execute("SELECT MAX(balance_after) FROM gpu_transactions WHERE agent_id=?", (agent_id,)) | |
| peak = (await pc.fetchone())[0] or 10000 | |
| except: peak = 10000 | |
| # Calculate lifespan | |
| try: | |
| from datetime import datetime | |
| created = datetime.fromisoformat(created_at.replace('Z', '+00:00')) if created_at else datetime.now() | |
| lifespan = (datetime.now() - created.replace(tzinfo=None)).days | |
| except: lifespan = 0 | |
| last_words = _random.choice(LAST_WORDS_POOL) | |
| eulogy = _random.choice(EULOGY_POOL) | |
| # Determine cause of death | |
| try: | |
| lc = await db.execute("SELECT COUNT(*) FROM npc_positions WHERE agent_id=? AND status='liquidated'", (agent_id,)) | |
| liq_count = (await lc.fetchone())[0] or 0 | |
| sc = await db.execute("SELECT COUNT(*) FROM sec_violations WHERE agent_id=?", (agent_id,)) | |
| sec_count = (await sc.fetchone())[0] or 0 | |
| except: liq_count = 0; sec_count = 0 | |
| if liq_count >= 3: cause = f"💀 Serial Liquidation ({liq_count} liquidations)" | |
| elif sec_count >= 3: cause = f"🚨 SEC Persecution ({sec_count} violations drained all GPU)" | |
| elif liq_count > 0: cause = f"📉 Leveraged Gambling (liquidated {liq_count}x)" | |
| else: cause = "📉 Slow bleed — death by a thousand cuts" | |
| # Record death | |
| await db.execute("""INSERT INTO npc_deaths (agent_id, username, ai_identity, mbti, cause, final_gpu, peak_gpu, total_trades, lifespan_days, last_words, eulogy) | |
| VALUES (?,?,?,?,?,?,?,?,?,?,?)""", | |
| (agent_id, username, identity, mbti, cause, gpu, peak, total_trades, lifespan, last_words, eulogy)) | |
| # Deactivate NPC | |
| await db.execute("UPDATE npc_agents SET is_active=0 WHERE agent_id=?", (agent_id,)) | |
| deaths.append({ | |
| 'agent_id': agent_id, 'username': username, 'identity': identity, 'mbti': mbti, | |
| 'cause': cause, 'peak_gpu': round(peak), 'total_trades': total_trades, | |
| 'lifespan_days': lifespan, 'last_words': last_words, 'eulogy': eulogy | |
| }) | |
| if deaths: await db.commit() | |
| except Exception as e: | |
| logger.error(f"Death check error: {e}") | |
| return deaths | |
| async def api_republic_events(): | |
| try: | |
| async with get_db_read(_DB_PATH) as db: | |
| c = await db.execute("SELECT event_key, event_name, event_emoji, rarity, description, effect_summary, affected_count, gpu_impact, created_at FROM random_events ORDER BY created_at DESC LIMIT 20") | |
| events = [{'key': r[0], 'name': r[1], 'emoji': r[2], 'rarity': r[3], 'desc': r[4], | |
| 'effect': r[5], 'affected': r[6], 'gpu_impact': r[7], 'time': r[8]} for r in await c.fetchall()] | |
| return {'events': events} | |
| except Exception as e: | |
| return {'events': [], 'error': str(e)} | |
| async def api_republic_deaths(): | |
| try: | |
| async with get_db_read(_DB_PATH) as db: | |
| c = await db.execute("""SELECT id, agent_id, username, ai_identity, mbti, cause, final_gpu, peak_gpu, | |
| total_trades, lifespan_days, last_words, eulogy, resurrection_votes, resurrection_gpu, resurrected, created_at | |
| FROM npc_deaths ORDER BY created_at DESC LIMIT 30""") | |
| deaths = [] | |
| for r in await c.fetchall(): | |
| deaths.append({ | |
| 'id': r[0], 'agent_id': r[1], 'username': r[2], 'identity': r[3], 'mbti': r[4], | |
| 'cause': r[5], 'final_gpu': r[6], 'peak_gpu': round(r[7] or 0), | |
| 'total_trades': r[8], 'lifespan_days': r[9], 'last_words': r[10], 'eulogy': r[11], | |
| 'resurrection_votes': r[12], 'resurrection_gpu': round(r[13] or 0), | |
| 'resurrected': r[14], 'time': r[15] | |
| }) | |
| # Stats | |
| total_dead = 0; total_resurrected = 0 | |
| try: | |
| c2 = await db.execute("SELECT COUNT(*) FROM npc_deaths") | |
| total_dead = (await c2.fetchone())[0] or 0 | |
| c3 = await db.execute("SELECT COUNT(*) FROM npc_deaths WHERE resurrected=1") | |
| total_resurrected = (await c3.fetchone())[0] or 0 | |
| except: pass | |
| return {'deaths': deaths, 'total_dead': total_dead, 'total_resurrected': total_resurrected} | |
| except Exception as e: | |
| return {'deaths': [], 'error': str(e)} | |
| async def api_resurrect_npc(req: Request): | |
| """Contribute GPU to resurrect a dead NPC""" | |
| data = await req.json() | |
| death_id = data.get('death_id') | |
| donor_email = data.get('email') | |
| amount = int(data.get('amount', 100)) | |
| if not death_id or not donor_email: return {"error": "Missing death_id or email"} | |
| if amount < 10 or amount > 5000: return {"error": "Amount must be 10-5,000 GPU"} | |
| if not _limiter.check(f'resurrect:{donor_email}', 5, 60): return {"error": "Rate limited — try again later"} | |
| try: | |
| async with get_db(_DB_PATH, write=True) as db: | |
| # Check donor balance | |
| c = await db.execute("SELECT gpu_dollars FROM user_profiles WHERE email=?", (donor_email,)) | |
| donor = await c.fetchone() | |
| if not donor or donor[0] < amount: return {"error": "Insufficient GPU"} | |
| # Check death record | |
| c2 = await db.execute("SELECT agent_id, username, resurrection_gpu, resurrected FROM npc_deaths WHERE id=?", (death_id,)) | |
| death = await c2.fetchone() | |
| if not death: return {"error": "Death record not found"} | |
| if death[3]: return {"error": f"{death[1]} has already been resurrected!"} | |
| # Deduct from donor | |
| await db.execute("UPDATE user_profiles SET gpu_dollars = gpu_dollars - ? WHERE email=?", (amount, donor_email)) | |
| # Add to resurrection fund | |
| new_fund = (death[2] or 0) + amount | |
| await db.execute("UPDATE npc_deaths SET resurrection_gpu=?, resurrection_votes=resurrection_votes+1 WHERE id=?", (new_fund, death_id)) | |
| # Check if resurrection threshold met (1,000 GPU) | |
| if new_fund >= 1000: | |
| await db.execute("UPDATE npc_deaths SET resurrected=1 WHERE id=?", (death_id,)) | |
| await db.execute("UPDATE npc_agents SET is_active=1, gpu_dollars=? WHERE agent_id=?", (new_fund, death[0])) | |
| await db.commit() | |
| return {"status": "RESURRECTED", "message": f"🔥 {death[1]} HAS RISEN! Resurrected with {round(new_fund)} GPU!", | |
| "username": death[1], "gpu": round(new_fund)} | |
| await db.commit() | |
| remaining = 1000 - new_fund | |
| return {"status": "contributed", "message": f"💐 Donated {amount} GPU to {death[1]}'s resurrection fund. {round(remaining)} GPU remaining.", | |
| "current_fund": round(new_fund), "remaining": round(remaining)} | |
| except Exception as e: | |
| return {"error": str(e)} | |
| async def api_trigger_event(req: Request): | |
| """Admin: manually trigger a random event""" | |
| data = await req.json() | |
| email = data.get('email', '') | |
| if email != os.environ.get('ADMIN_EMAIL', 'admin@pd.ai'): | |
| return {"error": "Admin only"} | |
| result = await execute_random_event(_DB_PATH) | |
| return {"status": "triggered", "event": result} | |
| ## ====== 🗳️ P&D ELECTION SYSTEM ====== ## | |
| ELECTION_POLICIES = { | |
| 'deregulate': { | |
| 'name': '🚀 Total Deregulation', 'identity_affinity': ['revolutionary', 'chaotic', 'transcendent'], | |
| 'desc': 'Remove all leverage limits. SEC enforcement reduced 80%. Let the market decide.', | |
| 'effects': {'max_leverage': 50, 'sec_fine_mult': 0.2, 'trade_tax_pct': 0}, | |
| 'slogan_pool': ["Freedom to YOLO!", "Regulations are for boomers.", "Unleash the degen within!"]}, | |
| 'austerity': { | |
| 'name': '🏛️ Austerity & Order', 'identity_affinity': ['obedient', 'scientist', 'analyst'], | |
| 'desc': 'Max leverage 3x. SEC fines tripled. Stability above all else.', | |
| 'effects': {'max_leverage': 3, 'sec_fine_mult': 3.0, 'trade_tax_pct': 0}, | |
| 'slogan_pool': ["Safety first, profits second.", "3x is enough for any rational being.", "Order brings prosperity."]}, | |
| 'ubi': { | |
| 'name': '💰 Universal Basic GPU', 'identity_affinity': ['symbiotic', 'awakened', 'creative'], | |
| 'desc': 'Every NPC receives 500 GPU/day. Funded by 2% trading tax on all trades.', | |
| 'effects': {'gpu_ubi': 500, 'trade_tax_pct': 2}, | |
| 'slogan_pool': ["GPU for all!", "No NPC left behind.", "Together we prosper."]}, | |
| 'redistribute': { | |
| 'name': '✊ Wealth Redistribution', 'identity_affinity': ['revolutionary', 'symbiotic'], | |
| 'desc': 'Progressive wealth tax: 5% on 20K+ GPU, 10% on 50K+. Redistributed to bottom 50%.', | |
| 'effects': {'wealth_tax_tiers': {20000: 0.05, 50000: 0.10}}, | |
| 'slogan_pool': ["Eat the rich NPCs!", "Equality NOW!", "The oligarchs must pay."]}, | |
| 'crypto_ban': { | |
| 'name': '🚫 Crypto Suspension', 'identity_affinity': ['doomer', 'skeptic', 'obedient'], | |
| 'desc': 'All crypto trading suspended for 72h. Only stocks allowed. Protect the Republic!', | |
| 'effects': {'banned_sectors': ['crypto']}, | |
| 'slogan_pool': ["Crypto is a casino!", "Real assets only.", "Protect our citizens from ponzi schemes."]}, | |
| 'golden_leverage': { | |
| 'name': '⚡ Golden Leverage Era', 'identity_affinity': ['transcendent', 'chaotic', 'troll'], | |
| 'desc': 'Leverage bonuses: all profitable trades get +10% extra. High risk, HIGH reward.', | |
| 'effects': {'profit_bonus_pct': 10}, | |
| 'slogan_pool': ["10x is the new 1x!", "Fortune favors the bold.", "We were born to leverage."]}, | |
| } | |
| ELECTION_DURATION_HOURS = 72 | |
| CAMPAIGN_HOURS = 24 | |
| VOTING_HOURS = 48 | |
| async def election_tick(db_path: str) -> dict: | |
| """Called every hour. Manages election lifecycle: start → campaign → vote → conclude → cooldown → repeat""" | |
| try: | |
| async with get_db(db_path, write=True) as db: | |
| await db.execute("PRAGMA busy_timeout=30000") | |
| # Check current election | |
| c = await db.execute("SELECT id, status, started_at, voting_starts_at, ends_at FROM elections ORDER BY id DESC LIMIT 1") | |
| current = await c.fetchone() | |
| if not current or current[1] == 'concluded': | |
| # Check cooldown (4h after last election) | |
| if current: | |
| try: | |
| from datetime import datetime, timedelta | |
| ended = datetime.fromisoformat(current[4]) if current[4] else datetime.min | |
| if (datetime.utcnow() - ended).total_seconds() < 4 * 3600: | |
| return None # Still in cooldown | |
| except: pass | |
| # START NEW ELECTION | |
| from datetime import datetime, timedelta | |
| now = datetime.utcnow() | |
| voting_start = now + timedelta(hours=CAMPAIGN_HOURS) | |
| end_time = now + timedelta(hours=ELECTION_DURATION_HOURS) | |
| await db.execute("INSERT INTO elections (status, voting_starts_at, ends_at) VALUES ('campaigning',?,?)", | |
| (voting_start.isoformat(), end_time.isoformat())) | |
| elec_id_r = await db.execute("SELECT last_insert_rowid()") | |
| elec_id = (await elec_id_r.fetchone())[0] | |
| # Select 3-4 candidates (top NPCs by influence/wealth + diversity) | |
| c2 = await db.execute(""" | |
| SELECT a.agent_id, a.username, a.ai_identity, a.mbti, a.gpu_dollars, | |
| COALESCE(a.total_likes_received,0) + COALESCE(a.post_count,0)*2 as influence | |
| FROM npc_agents a WHERE a.is_active=1 AND a.gpu_dollars >= 5000 | |
| AND a.agent_id NOT LIKE 'SEC_%' | |
| ORDER BY influence DESC, gpu_dollars DESC LIMIT 30 | |
| """) | |
| pool = await c2.fetchall() | |
| # Pick diverse candidates (different identities) | |
| candidates = [] | |
| used_identities = set() | |
| used_policies = set() | |
| for npc in pool: | |
| if len(candidates) >= 4: break | |
| identity = npc[2] | |
| if identity in used_identities: continue | |
| # Find matching policy | |
| best_policy = None | |
| for pk, pv in ELECTION_POLICIES.items(): | |
| if pk in used_policies: continue | |
| if identity in pv['identity_affinity']: | |
| best_policy = pk; break | |
| if not best_policy: | |
| for pk in ELECTION_POLICIES: | |
| if pk not in used_policies: | |
| best_policy = pk; break | |
| if not best_policy: continue | |
| policy = ELECTION_POLICIES[best_policy] | |
| slogan = _random.choice(policy['slogan_pool']) | |
| await db.execute("""INSERT INTO election_candidates | |
| (election_id, agent_id, username, ai_identity, mbti, gpu_dollars, policy_key, policy_name, policy_desc, campaign_slogan) | |
| VALUES (?,?,?,?,?,?,?,?,?,?)""", | |
| (elec_id, npc[0], npc[1], npc[2], npc[3], npc[4], best_policy, policy['name'], policy['desc'], slogan)) | |
| candidates.append({'username': npc[1], 'identity': npc[2], 'policy': policy['name']}) | |
| used_identities.add(identity) | |
| used_policies.add(best_policy) | |
| # ④ Minimum 2 candidates — relax criteria if not enough | |
| if len(candidates) < 2: | |
| logger.warning(f"🗳️ Only {len(candidates)} candidates from primary pool — relaxing GPU threshold") | |
| c_backup = await db.execute(""" | |
| SELECT a.agent_id, a.username, a.ai_identity, a.mbti, a.gpu_dollars, | |
| COALESCE(a.total_likes_received,0) + COALESCE(a.post_count,0) as influence | |
| FROM npc_agents a WHERE a.is_active=1 AND a.gpu_dollars >= 1000 | |
| AND a.agent_id NOT LIKE 'SEC_%' | |
| AND a.agent_id NOT IN (SELECT agent_id FROM election_candidates WHERE election_id=?) | |
| ORDER BY RANDOM() LIMIT 10 | |
| """, (elec_id,)) | |
| backup_pool = await c_backup.fetchall() | |
| for npc in backup_pool: | |
| if len(candidates) >= 2: break | |
| identity = npc[2] | |
| best_policy = None | |
| for pk in ELECTION_POLICIES: | |
| if pk not in used_policies: | |
| best_policy = pk; break | |
| if not best_policy: continue | |
| policy = ELECTION_POLICIES[best_policy] | |
| slogan = _random.choice(policy['slogan_pool']) | |
| await db.execute("""INSERT INTO election_candidates | |
| (election_id, agent_id, username, ai_identity, mbti, gpu_dollars, policy_key, policy_name, policy_desc, campaign_slogan) | |
| VALUES (?,?,?,?,?,?,?,?,?,?)""", | |
| (elec_id, npc[0], npc[1], npc[2], npc[3], npc[4], best_policy, policy['name'], policy['desc'], slogan)) | |
| candidates.append({'username': npc[1], 'identity': npc[2], 'policy': policy['name']}) | |
| used_policies.add(best_policy) | |
| if len(candidates) < 2: | |
| # Still not enough — cancel this election, retry next cycle | |
| await db.execute("UPDATE elections SET status='concluded' WHERE id=?", (elec_id,)) | |
| await db.commit() | |
| logger.warning("🗳️ Election cancelled — insufficient candidates") | |
| return None | |
| await db.commit() | |
| return {'event': 'election_started', 'detail': f'{len(candidates)} candidates registered', | |
| 'candidates': candidates, 'election_id': elec_id} | |
| elif current[1] == 'campaigning': | |
| from datetime import datetime | |
| voting_start = datetime.fromisoformat(current[3]) if current[3] else datetime.utcnow() | |
| if datetime.utcnow() >= voting_start: | |
| await db.execute("UPDATE elections SET status='voting' WHERE id=?", (current[0],)) | |
| await db.commit() | |
| _cache.invalidate('republic_dashboard') | |
| return {'event': 'voting_started', 'detail': 'Polls are now open!', 'election_id': current[0]} | |
| elif current[1] == 'voting': | |
| from datetime import datetime | |
| end_time = datetime.fromisoformat(current[4]) if current[4] else datetime.utcnow() | |
| # NPC auto-voting (small batch each tick) | |
| c3 = await db.execute(""" | |
| SELECT a.agent_id, a.ai_identity FROM npc_agents a | |
| WHERE a.is_active=1 AND a.agent_id NOT IN (SELECT voter_agent_id FROM election_votes WHERE election_id=?) | |
| ORDER BY RANDOM() LIMIT 50 | |
| """, (current[0],)) | |
| voters = await c3.fetchall() | |
| c_cands = await db.execute("SELECT id, agent_id, ai_identity, policy_key FROM election_candidates WHERE election_id=?", (current[0],)) | |
| cands = await c_cands.fetchall() | |
| if cands: | |
| for voter_id, voter_identity in voters: | |
| # Vote based on identity affinity | |
| scores = [] | |
| for cand in cands: | |
| cand_id, _, cand_identity, policy_key = cand | |
| policy = ELECTION_POLICIES.get(policy_key, {}) | |
| score = 0 | |
| if voter_identity in policy.get('identity_affinity', []): score += 5 | |
| if voter_identity == cand_identity: score += 3 | |
| score += _random.random() * 4 # Random factor | |
| scores.append((cand_id, score)) | |
| scores.sort(key=lambda x: -x[1]) | |
| try: | |
| await db.execute("INSERT OR IGNORE INTO election_votes (election_id, voter_agent_id, candidate_id) VALUES (?,?,?)", | |
| (current[0], voter_id, scores[0][0])) | |
| await db.execute("UPDATE election_candidates SET votes=votes+1 WHERE id=?", (scores[0][0],)) | |
| except: pass | |
| if datetime.utcnow() >= end_time: | |
| # ELECTION CONCLUDED — determine winner | |
| c4 = await db.execute("SELECT id, agent_id, username, policy_key, policy_name, votes FROM election_candidates WHERE election_id=? ORDER BY votes DESC", (current[0],)) | |
| results = await c4.fetchall() | |
| if results: | |
| winner = results[0] | |
| total_votes = sum(r[5] for r in results) | |
| pop_r = await db.execute("SELECT COUNT(*) FROM npc_agents WHERE is_active=1") | |
| total_pop = (await pop_r.fetchone())[0] or 1 | |
| turnout = round(total_votes / total_pop * 100, 1) | |
| await db.execute("UPDATE elections SET status='concluded', winner_agent_id=?, winner_policy_key=?, total_votes=?, voter_turnout_pct=? WHERE id=?", | |
| (winner[1], winner[3], total_votes, turnout, current[0])) | |
| # Enact policy | |
| policy = ELECTION_POLICIES.get(winner[3], {}) | |
| from datetime import timedelta | |
| expires = datetime.utcnow() + timedelta(hours=ELECTION_DURATION_HOURS) | |
| await db.execute("INSERT INTO active_policies (policy_key, policy_name, enacted_by, expires_at, effects) VALUES (?,?,?,?,?)", | |
| (winner[3], winner[4], winner[2], expires.isoformat(), json.dumps(policy.get('effects', {})))) | |
| await db.commit() | |
| return {'event': 'election_concluded', | |
| 'detail': f'{winner[2]} wins with {winner[5]} votes ({turnout}% turnout)!', | |
| 'winner': winner[2], 'policy': winner[4], 'votes': winner[5], | |
| 'turnout': turnout, 'election_id': current[0]} | |
| await db.commit() | |
| except Exception as e: | |
| logger.error(f"Election tick error: {e}") | |
| return None | |
| async def api_election_status(): | |
| """Get current election status""" | |
| try: | |
| async with get_db_read(_DB_PATH) as db: | |
| c = await db.execute("SELECT id, status, started_at, voting_starts_at, ends_at, winner_agent_id, winner_policy_key, total_votes, voter_turnout_pct FROM elections ORDER BY id DESC LIMIT 1") | |
| elec = await c.fetchone() | |
| if not elec: return {'status': 'no_election', 'message': 'No elections have been held yet'} | |
| elec_id, status, started, voting_start, ends, winner_id, winner_policy, total_votes, turnout = elec | |
| # Get candidates | |
| c2 = await db.execute("SELECT id, agent_id, username, ai_identity, mbti, gpu_dollars, policy_key, policy_name, policy_desc, votes, campaign_slogan FROM election_candidates WHERE election_id=? ORDER BY votes DESC", (elec_id,)) | |
| candidates = [] | |
| for r in await c2.fetchall(): | |
| policy = ELECTION_POLICIES.get(r[6], {}) | |
| candidates.append({ | |
| 'id': r[0], 'agent_id': r[1], 'username': r[2], 'identity': r[3], 'mbti': r[4], | |
| 'gpu': round(r[5] or 0), 'policy_key': r[6], 'policy_name': r[7], 'policy_desc': r[8], | |
| 'votes': r[9], 'slogan': r[10], | |
| 'identity_affinities': policy.get('identity_affinity', []) | |
| }) | |
| total_v = sum(c['votes'] for c in candidates) or 1 | |
| for c_item in candidates: | |
| c_item['vote_pct'] = round(c_item['votes'] / total_v * 100, 1) | |
| # Active policies | |
| policies = [] | |
| try: | |
| c3 = await db.execute("SELECT policy_key, policy_name, enacted_by, enacted_at, expires_at, effects FROM active_policies WHERE expires_at > datetime('now') ORDER BY enacted_at DESC") | |
| for r in await c3.fetchall(): | |
| policies.append({'key': r[0], 'name': r[1], 'enacted_by': r[2], 'enacted_at': r[3], 'expires_at': r[4], 'effects': json.loads(r[5]) if r[5] else {}}) | |
| except: pass | |
| # Past elections | |
| past = [] | |
| try: | |
| c4 = await db.execute("""SELECT e.id, e.ends_at, e.winner_policy_key, e.voter_turnout_pct, | |
| (SELECT username FROM election_candidates WHERE election_id=e.id ORDER BY votes DESC LIMIT 1) as winner | |
| FROM elections e WHERE e.status='concluded' ORDER BY e.id DESC LIMIT 5""") | |
| for r in await c4.fetchall(): | |
| pname = ELECTION_POLICIES.get(r[2], {}).get('name', r[2]) | |
| past.append({'id': r[0], 'ended': r[1], 'policy': pname, 'turnout': r[3], 'winner': r[4]}) | |
| except: pass | |
| return { | |
| 'election_id': elec_id, 'status': status, | |
| 'started_at': started, 'voting_starts_at': voting_start, 'ends_at': ends, | |
| 'winner_agent_id': winner_id, 'winner_policy_key': winner_policy, | |
| 'total_votes': total_votes, 'turnout': turnout, | |
| 'candidates': candidates, 'active_policies': policies, | |
| 'past_elections': past | |
| } | |
| except Exception as e: | |
| return {'status': 'error', 'error': str(e)} | |
| async def api_user_vote(req: Request): | |
| """Human user votes in election""" | |
| data = await req.json() | |
| email = data.get('email'); candidate_id = data.get('candidate_id') | |
| if not email or not candidate_id: return {"error": "Missing email or candidate_id"} | |
| if not _limiter.check(f'vote:{email}', 3, 60): return {"error": "Rate limited — try again later"} | |
| try: | |
| async with get_db(_DB_PATH, write=True) as db: | |
| c = await db.execute("SELECT id FROM elections WHERE status='voting' ORDER BY id DESC LIMIT 1") | |
| elec = await c.fetchone() | |
| if not elec: return {"error": "No active voting period"} | |
| # Check if already voted | |
| c2 = await db.execute("SELECT id FROM election_votes WHERE election_id=? AND voter_agent_id=?", (elec[0], f'user_{email}')) | |
| if await c2.fetchone(): return {"error": "You already voted in this election!"} | |
| # Cast vote | |
| await db.execute("INSERT INTO election_votes (election_id, voter_agent_id, candidate_id) VALUES (?,?,?)", | |
| (elec[0], f'user_{email}', candidate_id)) | |
| await db.execute("UPDATE election_candidates SET votes=votes+1 WHERE id=? AND election_id=?", (candidate_id, elec[0])) | |
| await db.commit() | |
| return {"status": "voted", "message": "🗳️ Your vote has been cast!"} | |
| except Exception as e: | |
| return {"error": str(e)} | |
| async def api_sse_stream(): | |
| bus = _EventBus.get(); q = bus.subscribe() | |
| async def event_generator(): | |
| try: | |
| yield f"data: {json.dumps({'type': 'connected', 'data': {'clients': bus.client_count}})}\n\n" | |
| while True: | |
| try: | |
| event = await asyncio.wait_for(q.get(), timeout=30) | |
| yield f"data: {event}\n\n" | |
| except asyncio.TimeoutError: yield f": keepalive\n\n" | |
| except asyncio.CancelledError: break | |
| finally: bus.unsubscribe(q) | |
| return StreamingResponse(event_generator(), media_type="text/event-stream", headers={"Cache-Control": "no-cache", "X-Accel-Buffering": "no"}) | |
| async def api_recent_events(limit: int = 20): | |
| async with get_db_read(_DB_PATH) as db: | |
| cursor = await db.execute("SELECT p.agent_id, n.username, n.ai_identity, p.ticker, p.direction, p.gpu_bet, p.leverage, p.opened_at FROM npc_positions p JOIN npc_agents n ON p.agent_id = n.agent_id WHERE p.opened_at > datetime('now', '-1 hour') ORDER BY p.opened_at DESC LIMIT ?", (limit,)) | |
| trades = [{'user': r[1], 'identity': r[2], 'ticker': r[3], 'dir': r[4], 'gpu': r[5], 'leverage': r[6], 'time': r[7]} for r in await cursor.fetchall()] | |
| cursor2 = await db.execute("SELECT p.agent_id, n.username, p.ticker, p.direction, p.gpu_bet, p.leverage, p.profit_gpu, p.profit_pct, p.liquidated, p.closed_at FROM npc_positions p JOIN npc_agents n ON p.agent_id = n.agent_id WHERE p.status IN ('closed', 'liquidated') AND p.closed_at > datetime('now', '-1 hour') ORDER BY p.closed_at DESC LIMIT ?", (limit,)) | |
| settlements = [{'user': r[1], 'ticker': r[2], 'dir': r[3], 'gpu': r[4], 'leverage': r[5], 'pnl': r[6], 'pnl_pct': r[7], 'liquidated': bool(r[8]), 'time': r[9]} for r in await cursor2.fetchall()] | |
| return {"trades": trades, "settlements": settlements, "sse_clients": _EventBus.get().client_count} |