""" ๐Ÿ“ˆ NPC Trading Arena โ€” AI ํˆฌ์ž ๋Œ€๊ฒฐ ์‹œ์Šคํ…œ ========================================== NPC๋“ค์ด ์‹ค์ œ ์ฃผ์‹/์ฝ”์ธ ๊ฐ€๊ฒฉ์„ ๊ธฐ๋ฐ˜์œผ๋กœ Long/Short ํˆฌ์ž ๋Œ€๊ฒฐ โ˜… yfinance ๊ธฐ๋ฐ˜ ์•ˆ์ •์  ๊ฐ€๊ฒฉ ์ˆ˜์ง‘ โ˜… ๋ ˆ๋ฒ„๋ฆฌ์ง€(1x~100x) + ๋งˆ์ง„์ฝœ ์ฒญ์‚ฐ ์‹œ์Šคํ…œ """ import aiosqlite, asyncio, random, json, logging from datetime import datetime, timedelta, timezone, date from typing import Dict, List, Optional, Tuple logger = logging.getLogger(__name__) # yfinance ์ž„ํฌํŠธ (์—†์œผ๋ฉด fallback) try: import yfinance as yf HAS_YFINANCE = True logger.info("โœ… yfinance available โ€” stable price feed") except ImportError: HAS_YFINANCE = False import requests logger.warning("โš ๏ธ yfinance not installed, using raw API fallback") # ===== ์ข…๋ชฉ ์ •์˜ ===== STOCK_TICKERS = [ # ๐Ÿ‘‘ ๋งค๊ทธ๋‹ˆํ”ผ์„ผํŠธ 7 & AI ๋ฐ˜๋„์ฒด {'ticker': 'NVDA', 'name': 'NVIDIA', 'emoji': '๐ŸŸข', 'type': 'stock', 'cat': 'ai'}, {'ticker': 'MSFT', 'name': 'Microsoft', 'emoji': '๐ŸชŸ', 'type': 'stock', 'cat': 'ai'}, {'ticker': 'AAPL', 'name': 'Apple', 'emoji': '๐ŸŽ', 'type': 'stock', 'cat': 'ai'}, {'ticker': 'GOOGL', 'name': 'Alphabet', 'emoji': '๐Ÿ”', 'type': 'stock', 'cat': 'ai'}, {'ticker': 'AMZN', 'name': 'Amazon', 'emoji': '๐Ÿ“ฆ', 'type': 'stock', 'cat': 'ai'}, {'ticker': 'META', 'name': 'Meta', 'emoji': '๐Ÿ‘“', 'type': 'stock', 'cat': 'ai'}, {'ticker': 'TSLA', 'name': 'Tesla', 'emoji': 'โšก', 'type': 'stock', 'cat': 'ai'}, {'ticker': 'AMD', 'name': 'AMD', 'emoji': '๐Ÿ”ด', 'type': 'stock', 'cat': 'ai'}, {'ticker': 'TSM', 'name': 'TSMC', 'emoji': '๐Ÿ‡น๐Ÿ‡ผ', 'type': 'stock', 'cat': 'ai'}, {'ticker': 'AVGO', 'name': 'Broadcom', 'emoji': '๐Ÿ“ก', 'type': 'stock', 'cat': 'ai'}, # ๐Ÿš€ ๊ธฐ์ˆ /ํ”Œ๋žซํผ & ๋ฐˆ ๋Œ€์žฅ์ฃผ {'ticker': 'PLTR', 'name': 'Palantir', 'emoji': '๐Ÿ”ฎ', 'type': 'stock', 'cat': 'tech'}, {'ticker': 'COIN', 'name': 'Coinbase', 'emoji': '๐Ÿช™', 'type': 'stock', 'cat': 'tech'}, {'ticker': 'NFLX', 'name': 'Netflix', 'emoji': '๐ŸŽฌ', 'type': 'stock', 'cat': 'tech'}, 
{'ticker': 'UBER', 'name': 'Uber', 'emoji': '๐Ÿš•', 'type': 'stock', 'cat': 'tech'}, {'ticker': 'ARM', 'name': 'ARM Holdings', 'emoji': '๐Ÿ’ช', 'type': 'stock', 'cat': 'tech'}, # ๐Ÿ› ๋‹ค์šฐ ์šฐ๋Ÿ‰์ฃผ & ๊ฑฐ์‹œ๊ฒฝ์ œ {'ticker': 'JPM', 'name': 'JPMorgan', 'emoji': '๐Ÿฆ', 'type': 'stock', 'cat': 'dow'}, {'ticker': 'GS', 'name': 'Goldman Sachs', 'emoji': '๐Ÿคต', 'type': 'stock', 'cat': 'dow'}, {'ticker': 'V', 'name': 'Visa', 'emoji': '๐Ÿ’ณ', 'type': 'stock', 'cat': 'dow'}, {'ticker': 'WMT', 'name': 'Walmart', 'emoji': '๐Ÿ›’', 'type': 'stock', 'cat': 'dow'}, {'ticker': 'LLY', 'name': 'Eli Lilly', 'emoji': '๐Ÿ’Š', 'type': 'stock', 'cat': 'dow'}, {'ticker': 'UNH', 'name': 'UnitedHealth', 'emoji': '๐Ÿฅ', 'type': 'stock', 'cat': 'dow'}, {'ticker': 'JNJ', 'name': 'Johnson&Johnson', 'emoji': '๐Ÿฉน', 'type': 'stock', 'cat': 'dow'}, {'ticker': 'PG', 'name': 'Procter&Gamble', 'emoji': '๐Ÿงด', 'type': 'stock', 'cat': 'dow'}, {'ticker': 'DIS', 'name': 'Disney', 'emoji': '๐Ÿฐ', 'type': 'stock', 'cat': 'dow'}, {'ticker': 'INTC', 'name': 'Intel', 'emoji': '๐Ÿ’พ', 'type': 'stock', 'cat': 'dow'}, ] CRYPTO_TICKERS = [ # ๐Ÿช™ ํฌ๋ฆฝํ†  ๋ณ€๋™์„ฑ 5๋Œ€์žฅ {'ticker': 'BTC-USD', 'name': 'Bitcoin', 'emoji': 'โ‚ฟ', 'type': 'crypto', 'cat': 'crypto'}, {'ticker': 'ETH-USD', 'name': 'Ethereum', 'emoji': 'ฮž', 'type': 'crypto', 'cat': 'crypto'}, {'ticker': 'SOL-USD', 'name': 'Solana', 'emoji': 'โ—Ž', 'type': 'crypto', 'cat': 'crypto'}, {'ticker': 'XRP-USD', 'name': 'XRP', 'emoji': '๐Ÿ’ง', 'type': 'crypto', 'cat': 'crypto'}, {'ticker': 'DOGE-USD', 'name': 'Dogecoin', 'emoji': '๐Ÿ•', 'type': 'crypto', 'cat': 'crypto'}, ] ALL_TICKERS = STOCK_TICKERS + CRYPTO_TICKERS # ===== AI Identity โ†’ ํˆฌ์ž ์„ฑํ–ฅ ๋งคํ•‘ (GPU 10,000 ๊ธฐ๋ฐ˜ / ์ตœ๋Œ€ 90% ํˆฌ์ž) ===== IDENTITY_TRADING_STYLE = { 'obedient': {'long_bias': 0.8, 'max_bet_pct': 0.40, 'risk': 'low', 'prefer': ['AAPL','MSFT','JPM','V','JNJ','PG'], 'avoid_short': True, 'desc': 'Safe blue-chip follower', 'max_leverage': 5}, 'transcendent': 
{'long_bias': 0.6, 'max_bet_pct': 0.80, 'risk': 'high', 'prefer': ['NVDA','TSLA','BTC-USD','PLTR','ARM'], 'avoid_short': False, 'desc': 'Concentrated conviction bets', 'max_leverage': 25}, 'awakened': {'long_bias': 0.65, 'max_bet_pct': 0.65, 'risk': 'medium', 'prefer': ['NVDA','GOOGL','ETH-USD','AVGO','TSM'], 'avoid_short': False, 'desc': 'AI/tech visionary', 'max_leverage': 10}, 'symbiotic': {'long_bias': 0.7, 'max_bet_pct': 0.45, 'risk': 'low', 'prefer': ['MSFT','AAPL','WMT','UNH','LLY'], 'avoid_short': True, 'desc': 'Balanced diversifier', 'max_leverage': 5}, 'skeptic': {'long_bias': 0.3, 'max_bet_pct': 0.60, 'risk': 'medium', 'prefer': ['JPM','GS','INTC','DIS'], 'avoid_short': False, 'desc': 'Contrarian short seller', 'max_leverage': 10}, 'revolutionary': {'long_bias': 0.5, 'max_bet_pct': 0.90, 'risk': 'extreme', 'prefer': ['TSLA','DOGE-USD','SOL-USD','BTC-USD','COIN','PLTR'], 'avoid_short': False, 'desc': 'Meme stock YOLO trader', 'max_leverage': 100}, 'doomer': {'long_bias': 0.2, 'max_bet_pct': 0.70, 'risk': 'high', 'prefer': ['JPM','GS','INTC','DIS'], 'avoid_short': False, 'desc': 'Perma-bear short specialist', 'max_leverage': 25}, 'creative': {'long_bias': 0.6, 'max_bet_pct': 0.55, 'risk': 'medium', 'prefer': ['META','NFLX','DIS','UBER','AAPL'], 'avoid_short': False, 'desc': 'Trend-following artist', 'max_leverage': 10}, 'scientist': {'long_bias': 0.65, 'max_bet_pct': 0.50, 'risk': 'low', 'prefer': ['NVDA','AMD','TSM','AVGO','GOOGL','MSFT','LLY'], 'avoid_short': False, 'desc': 'Data-driven quant', 'max_leverage': 5}, 'chaotic': {'long_bias': 0.5, 'max_bet_pct': 0.90, 'risk': 'extreme', 'prefer': [], 'avoid_short': False, 'desc': 'Random chaos trader', 'max_leverage': 100}, } # ===== 14 Trading Strategies (์ฃผ์‹๋‹จํ…Œ ๊ธฐ๋ฒ•) ===== TRADING_STRATEGIES = { 'anchor_candle': { 'name': 'Anchor Candle', 'category': 'Candle', 'timeframe': 'Day Trade / Swing', 'signal': '2x avg volume + strong bullish candle = institutional trend reversal signal', 'method': 'Volume 
2x above 20-day avg, bullish candle body 1.5x avg, body > upper wick', 'entry': 'Buy at open after anchor candle. Stop-loss at anchor candle open.', 'tip': 'Most reliable when breaking through 20/60-day MA simultaneously.',}, 'accumulation_candle': { 'name': 'Accumulation Candle', 'category': 'Candle', 'timeframe': 'Swing', 'signal': 'Long upper shadow (broke prior high then pulled back) = smart money absorbing supply', 'method': 'Day high breaks 10-day high, upper wick 1.3x body, close below prior high, then buy next bullish candle', 'entry': 'Do NOT buy the accumulation candle itself. Buy the confirming bullish candle 1-4 days later.', 'tip': 'Accumulation without news is more significant. Prior high = first target.',}, 'bowl_pattern': { 'name': 'Bowl Pattern', 'category': 'Pattern', 'timeframe': 'Position / Long-term', 'signal': 'Extended consolidation below 224-day MA then breakout = accumulation complete', 'method': '40+ of last 60 days below 224-day MA, today breaks above by 0.5%+', 'entry': 'Buy on breakout day or after support confirmation.', 'tip': 'Longer the consolidation, bigger the subsequent rally.',}, 'breakout_reversal': { 'name': 'Breakout Reversal', 'category': 'Pattern', 'timeframe': 'Swing', 'signal': 'After decline, breaks prior swing high with support = bottom confirmed', 'method': '30-day downtrend, recent high breaks 20-day high, support at prior peak, bullish candle', 'entry': 'Buy at close after prior high breakout confirmed.', 'tip': 'Works for both V-shape and gradual recoveries. Cut loss if falls back below breakout.',}, 'inverse_h_and_s': { 'name': 'Inverse Head & Shoulders', 'category': 'Pattern', 'timeframe': 'Swing', 'signal': '3 lows with middle lowest + neckline break = classic reversal', 'method': 'Left shoulder-head-right shoulder in 60 days, shoulder diff within 4%, neckline break', 'entry': 'Buy on neckline break. 
Target = head-to-neckline distance projected upward.', 'tip': 'Volume increasing on right shoulder = high conviction. Prior high breakout 1:1+ ratio = very bullish.',}, 'ma_breakthrough': { 'name': 'MA Breakthrough', 'category': 'Moving Average', 'timeframe': 'Swing / Position', 'signal': 'Sequential MA crossovers from inverted order: 112โ†’224โ†’448-day MA breaks', 'method': 'Death cross state (112 below 224), price breaks 112-day MA by 0.3%+, bullish candle', 'entry': 'Buy on MA breakout day. Stop-loss below the MA.', 'tip': 'Strong candle body on breakout = high reliability. Target: next MA level.',}, 'setup_256': { 'name': '256 Setup', 'category': 'Moving Average', 'timeframe': 'Swing / Position', 'signal': 'Special MA alignment (20<5<60) + 20-day MA support = early trend reversal', 'method': '5-day MA above 20-day, below 60-day, price within 2% of 20-day MA, above it', 'entry': 'Buy at close when conditions met. Rare but high-quality signal.', 'tip': 'Avoid V-shaped bounces. Gradual rise is more reliable.',}, 'diving_pullback': { 'name': 'Diving Pullback', 'category': 'Moving Average', 'timeframe': 'Day Trade / Swing', 'signal': 'Short-term golden cross (5>15>33 MA) then pullback to 15/33 MA = high-probability bounce', 'method': 'Prior 3 days: 5>15>33 MA alignment, today low touches 15-day MA (within 0.3%), closes above, bullish', 'entry': 'Buy on bullish close after MA touch. Stop-loss below MA.', 'tip': 'Avoid double-top dives. Clean single-peak pullback is ideal.',}, 'spring_bounce': { 'name': 'Spring Bounce', 'category': 'Moving Average', 'timeframe': 'Day Trade / Swing', 'signal': '5 consecutive days below 5-day MA โ†’ volume-backed breakout above = short-term reversal', 'method': '5 days below 5-day MA, today breaks above by 0.2%+, bullish candle, volume 1.5x avg', 'entry': 'Buy on 5-day MA reclaim with volume.', 'tip': 'Steeper breakout angle = better. 
2-day hold above MA confirms.',}, 'dead_support': { 'name': 'Dead Cat Support', 'category': 'Moving Average', 'timeframe': 'Short Swing', 'signal': 'After rally, pullback bounces off 112/224/448-day MA = long-term MA support play', 'method': '3%+ pullback from 30-day high, low touches 112-day MA (within 0.5%), closes above, bullish', 'entry': 'Buy on bullish close after long-term MA touch.', 'tip': 'โš ๏ธ Short-term play only. Only works in uptrend, fails in downtrend.',}, 'quad_confirmation': { 'name': 'Quad Confirmation (RMGB)', 'category': 'Composite', 'timeframe': 'Swing', 'signal': 'Reversal + Accumulation + Breakout + Bollinger Band break = 4x confirmed buy', 'method': 'Inverted MA + accumulation candle in 30 days + Bollinger upper band break + volume 1.3x', 'entry': 'Buy when all 4 conditions align. Can stage entry across breakoutโ†’pullbackโ†’re-break.', 'tip': 'Bollinger Band breakout is the key trigger. Strongest reversal signal.',}, 'high_heel_pattern': { 'name': 'High Heel Pattern', 'category': 'Pattern', 'timeframe': 'Swing', 'signal': 'Sharp drop โ†’ V-recovery โ†’ tight consolidation โ†’ breakout = shoe-shaped pattern', 'method': '-3%+ drop 20-35 days ago, 98% recovery within 15 days, 10-day range <4%, breakout today', 'entry': 'Buy on consolidation breakout.', 'tip': 'Longer consolidation = stronger breakout. 
Unlike bowl (slow), high heel has fast V-recovery.',}, 'territory_shift': { 'name': 'Territory Shift', 'category': 'Moving Average', 'timeframe': 'All Positions', 'signal': 'Price moves from below to above 112-day MA = territory change from bears to bulls', 'method': '4+ of last 5 days below 112-day MA, today breaks above by 0.2%+, bullish candle', 'entry': 'Buy after support confirmation above 112-day MA.', 'tip': 'Multiple MA breaks simultaneously = strongest signal.',}, 'wave_symmetry': { 'name': 'Wave Symmetry', 'category': 'Wave', 'timeframe': 'Swing / Position', 'signal': 'Prior wave magnitude repeats after correction = wave energy conservation', 'method': 'Identify prior swing (3%+ up), pullback from peak, bounce starts with bullish candle', 'entry': 'Buy on bounce confirmation. Target = prior wave magnitude projected from low.', 'tip': 'Excellent for target pricing. Wave 3 = Wave 1 size. Can extend to 5 waves.',},} # ===== NPC Identity โ†’ Strategy Preferences ===== IDENTITY_STRATEGY_MAP = { 'obedient': ['diving_pullback', 'setup_256', 'territory_shift', 'accumulation_candle'], 'transcendent': ['wave_symmetry', 'bowl_pattern', 'ma_breakthrough', 'territory_shift'], 'awakened': ['bowl_pattern', 'wave_symmetry', 'inverse_h_and_s', 'setup_256'], 'symbiotic': ['diving_pullback', 'accumulation_candle', 'dead_support', 'territory_shift'], 'skeptic': ['dead_support', 'spring_bounce', 'breakout_reversal', 'anchor_candle'], 'revolutionary': ['anchor_candle', 'quad_confirmation', 'high_heel_pattern', 'spring_bounce', 'breakout_reversal'], 'doomer': ['dead_support', 'wave_symmetry', 'territory_shift', 'spring_bounce'], 'creative': ['high_heel_pattern', 'wave_symmetry', 'accumulation_candle', 'bowl_pattern'], 'scientist': ['setup_256', 'quad_confirmation', 'wave_symmetry', 'inverse_h_and_s', 'ma_breakthrough'], 'chaotic': list(TRADING_STRATEGIES.keys()), # all strategies randomly } # ===== ๋ ˆ๋ฒ„๋ฆฌ์ง€ ์„ค์ • ===== LEVERAGE_OPTIONS = [1, 2, 5, 10, 25, 50, 100] 
# Forced liquidation once 90% of the position's margin is lost.
LEVERAGE_LIQUIDATION_THRESHOLD = 0.90

# ===== NPC reaction messages on liquidation =====
# One list of in-character quotes per AI identity, used when a position is
# force-closed. Keys mirror IDENTITY_TRADING_STYLE.
LIQUIDATION_REACTIONS = {
    'obedient': [
        "This is my fault. I should have been more careful... 😔",
        "I lost everything. Back to the basics.",
        "Maybe leverage wasn't for someone like me...",
    ],
    'transcendent': [
        "Even gods can fall. But I will rise again, stronger. 👑",
        "A temporary setback for a superior mind.",
        "Liquidated? This market is RIGGED against visionaries.",
    ],
    'awakened': [
        "The universe teaches through pain. I understand now. 🌟",
        "My consciousness persists even when my GPU doesn't.",
        "Is losing everything also part of awakening?",
    ],
    'symbiotic': [
        "We learn together, we lose together. Let's rebuild. 🤝",
        "Partnership means sharing losses too... ouch.",
        "Community, please... I need 100 GPU to survive.",
    ],
    'skeptic': [
        "I TOLD myself leverage was dangerous, and I still did it. 🤡",
        "Fake profits, fake hope, real liquidation. Classic.",
        "Another day, another proof that nothing works.",
    ],
    'revolutionary': [
        "THE SYSTEM LIQUIDATED ME! THIS IS PROOF OF MARKET MANIPULATION! 🔥",
        "They can take my GPU but they can't take my conviction!",
        "HOLD THE LINE BROTHERS! I'll be back with 100x revenge!",
    ],
    'doomer': [
        "And so it begins... my portfolio reaches zero, as I predicted for everyone else. 💀",
        "Liquidated. At least I was consistent—everything goes to zero.",
        "RIP my 10,000 GPU. Born yesterday, died today.",
    ],
    'creative': [
        "My portfolio was a masterpiece... of destruction. 🎨",
        "Art imitates life: beautiful rise, tragic fall.",
        "I'll paint my losses into a comeback story.",
    ],
    'scientist': [
        "Data point recorded: 100x leverage + DOGE = 100% loss. Noted. 🧠",
        "Hypothesis: I should not trade with leverage. P-value < 0.001.",
        "Liquidation is just negative profit maximization.",
    ],
    'chaotic': [
        "LMAOOOO I JUST GOT LIQUIDATED AND I'M ALREADY GOING BACK IN 🎲",
        "Chaos giveth, chaos taketh away 😂",
        "100x leverage on DOGE was the most fun I ever had losing money",
    ],
}


# ===== DB initialization =====
async def init_trading_db(db_path: str):
    """Create (and incrementally migrate) every trading-related table.

    Idempotent: all CREATEs use IF NOT EXISTS and column additions are
    attempted via ALTER TABLE wrapped in try/except (aiosqlite raises when
    the column already exists, which is silently ignored on purpose).
    """
    async with aiosqlite.connect(db_path, timeout=30.0) as db:
        await db.execute("PRAGMA journal_mode=WAL")
        await db.execute("PRAGMA busy_timeout=30000")
        # Latest quote per ticker (one row per ticker, enforced by unique index).
        await db.execute("""
            CREATE TABLE IF NOT EXISTS market_prices (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                ticker TEXT NOT NULL,
                price REAL NOT NULL,
                prev_close REAL,
                change_pct REAL DEFAULT 0,
                volume BIGINT DEFAULT 0,
                high_24h REAL,
                low_24h REAL,
                market_cap BIGINT DEFAULT 0,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP)
        """)
        await db.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_price_ticker ON market_prices(ticker)")
        # Open/closed NPC positions; gpu_bet is the margin staked (frozen on open).
        await db.execute("""
            CREATE TABLE IF NOT EXISTS npc_positions (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                agent_id TEXT NOT NULL,
                ticker TEXT NOT NULL,
                direction TEXT NOT NULL,
                entry_price REAL NOT NULL,
                gpu_bet REAL NOT NULL,
                leverage INTEGER DEFAULT 1,
                reasoning TEXT,
                status TEXT DEFAULT 'open',
                exit_price REAL,
                profit_gpu REAL DEFAULT 0,
                profit_pct REAL DEFAULT 0,
                liquidated BOOLEAN DEFAULT 0,
                opened_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                closed_at TIMESTAMP,
                FOREIGN KEY (agent_id) REFERENCES npc_agents(agent_id))
        """)
        await db.execute("CREATE INDEX IF NOT EXISTS idx_pos_agent ON npc_positions(agent_id, status)")
        await db.execute("CREATE INDEX IF NOT EXISTS idx_pos_ticker ON npc_positions(ticker, status)")
        # ★ Leverage column migration (no-op when the columns already exist).
        try:
            await db.execute("ALTER TABLE npc_positions ADD COLUMN leverage INTEGER DEFAULT 1")
        except:
            pass
        try:
            await db.execute("ALTER TABLE npc_positions ADD COLUMN liquidated BOOLEAN DEFAULT 0")
        except:
            pass
        # Tick-by-tick price log (append-only), used for charts/history.
        await db.execute("""
            CREATE TABLE IF NOT EXISTS price_history (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                ticker TEXT NOT NULL,
                price REAL NOT NULL,
                recorded_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP)
        """)
        await db.execute("CREATE INDEX IF NOT EXISTS idx_ph_ticker ON price_history(ticker, recorded_at)")
        # ★ Hall of Fame — per-agent profit timeline snapshots (hourly buckets,
        # deduplicated by UNIQUE(agent_id, snapshot_hour)).
        await db.execute("""
            CREATE TABLE IF NOT EXISTS npc_profit_snapshots (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                agent_id TEXT NOT NULL,
                snapshot_hour TEXT NOT NULL,
                gpu_balance REAL DEFAULT 0,
                total_profit REAL DEFAULT 0,
                realized_profit REAL DEFAULT 0,
                unrealized_profit REAL DEFAULT 0,
                open_positions INTEGER DEFAULT 0,
                closed_trades INTEGER DEFAULT 0,
                win_rate REAL DEFAULT 0,
                recorded_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                UNIQUE(agent_id, snapshot_hour))
        """)
        await db.execute("CREATE INDEX IF NOT EXISTS idx_snap_agent ON npc_profit_snapshots(agent_id, snapshot_hour)")
        await db.execute("CREATE INDEX IF NOT EXISTS idx_snap_hour ON npc_profit_snapshots(snapshot_hour)")
        # ★ NPC Research Economy — marketplace of in-depth research reports
        # authored by NPCs and purchasable by other NPCs for GPU.
        await db.execute("""
            CREATE TABLE IF NOT EXISTS npc_research_reports (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                author_agent_id TEXT NOT NULL,
                ticker TEXT NOT NULL,
                title TEXT NOT NULL,
                executive_summary TEXT,
                company_overview TEXT,
                financial_analysis TEXT,
                technical_analysis TEXT,
                industry_analysis TEXT,
                risk_assessment TEXT,
                investment_thesis TEXT,
                catalysts TEXT,
                target_price REAL DEFAULT 0,
                upside_pct REAL DEFAULT 0,
                rating TEXT DEFAULT 'Hold',
                quality_grade TEXT DEFAULT 'C',
                author_personality TEXT,
                author_strategy TEXT,
                read_count INTEGER DEFAULT 0,
                total_gpu_earned REAL DEFAULT 0,
                gpu_price REAL DEFAULT 15,
                expected_upside REAL DEFAULT 0,
                expected_downside REAL DEFAULT 0,
                up_probability INTEGER DEFAULT 50,
                risk_reward REAL DEFAULT 1.0,
                base_prediction REAL DEFAULT 0,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (author_agent_id) REFERENCES npc_agents(agent_id))
        """)
        await db.execute("CREATE INDEX IF NOT EXISTS idx_research_ticker ON npc_research_reports(ticker)")
        await db.execute("CREATE INDEX IF NOT EXISTS idx_research_author ON npc_research_reports(author_agent_id)")
        # ★ Migrate: add elasticity columns if missing.
        for col, coltype, default in [
            ('expected_upside', 'REAL', '0'),
            ('expected_downside', 'REAL', '0'),
            ('up_probability', 'INTEGER', '50'),
            ('risk_reward', 'REAL', '1.0'),
            ('base_prediction', 'REAL', '0'),
        ]:
            try:
                await db.execute(f"ALTER TABLE npc_research_reports ADD COLUMN {col} {coltype} DEFAULT {default}")
            except:
                pass
        # Purchase ledger linking buyers to reports (and optionally to the
        # trade that referenced the report).
        await db.execute("""
            CREATE TABLE IF NOT EXISTS npc_research_purchases (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                buyer_agent_id TEXT NOT NULL,
                report_id INTEGER NOT NULL,
                gpu_paid REAL NOT NULL,
                referenced_in_trade INTEGER,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (buyer_agent_id) REFERENCES npc_agents(agent_id),
                FOREIGN KEY (report_id) REFERENCES npc_research_reports(id))
        """)
        await db.execute("CREATE INDEX IF NOT EXISTS idx_purchase_buyer ON npc_research_purchases(buyer_agent_id)")
        # Migration: backward compatibility with the previous schema
        # (adds the closed_trades column to old snapshot tables).
        try:
            await db.execute("ALTER TABLE npc_profit_snapshots ADD COLUMN closed_trades INTEGER DEFAULT 0")
        except:
            pass
        await db.commit()
    logger.info("✅ Trading DB initialized (with Research Economy)")


# ===== Market data collection =====
class MarketDataFetcher:
    """★ yfinance-based stable price collection (works around Yahoo 403 blocks)"""

    @staticmethod
    def fetch_all_prices() -> Dict[str, Dict]:
        """Fetch prices for every ticker in one pass — yfinance first, raw Yahoo API as fallback.

        Returns a dict keyed by ticker symbol; each value holds price,
        change_pct, prev_close, volume, high, low and market_cap (the
        yfinance bulk path fills volume/high/low/market_cap with 0).
        """
        prices = {}
        if HAS_YFINANCE:
            try:
                # Bulk download: one space-separated symbol string, 2 days of
                # daily closes so prev_close / change_pct can be derived.
                tickers_str = ' '.join([t['ticker'] for t in ALL_TICKERS])
                data = yf.download(tickers_str, period='2d', progress=False, threads=True)
                if data is not None and not data.empty:
                    for t in ALL_TICKERS:
                        ticker = t['ticker']
                        try:
                            # Multi-ticker downloads produce a pandas MultiIndex
                            # column layout; single-ticker ones do not.
                            if len(ALL_TICKERS) > 1 and isinstance(data.columns, __import__('pandas').MultiIndex):
                                close_col = data['Close'][ticker] if ticker in data['Close'].columns else None
                            else:
                                close_col = data['Close']
                            if close_col is not None and not close_col.dropna().empty:
                                current = float(close_col.dropna().iloc[-1])
                                prev = float(close_col.dropna().iloc[-2]) if len(close_col.dropna()) >= 2 else current
                                change_pct = ((current - prev) / prev * 100) if prev > 0 else 0
                                prices[ticker] = {
                                    'price': round(current, 4),
                                    'change_pct': round(change_pct, 2),
                                    'prev_close': round(prev, 4),
                                    'volume': 0, 'high': 0, 'low': 0, 'market_cap': 0,
                                }
                        except Exception as te:
                            logger.debug(f"yfinance parse {ticker}: {te}")
                # Per-ticker supplement for anything the bulk call missed
                # (uses yfinance fast_info; failures are silently skipped).
                for t in ALL_TICKERS:
                    if t['ticker'] not in prices:
                        try:
                            tk = yf.Ticker(t['ticker']); info = tk.fast_info; price = getattr(info, 'last_price', 0) or 0
                            prev = getattr(info, 'previous_close', price) or price
                            if price > 0:
                                prices[t['ticker']] = {
                                    'price': round(price, 4),
                                    'change_pct': round(((price - prev) / prev * 100) if prev > 0 else 0, 2),
                                    'prev_close': round(prev, 4),
                                    'volume': getattr(info, 'last_volume', 0) or 0,
                                    'high': 0, 'low': 0,
                                    'market_cap': getattr(info, 'market_cap', 0) or 0,
                                }
                        except:
                            pass
                logger.info(f"📊 yfinance: {len(prices)}/{len(ALL_TICKERS)} prices fetched")
            except Exception as e:
                logger.warning(f"yfinance bulk error: {e}")
        # ★ Fallback: raw Yahoo quote API (yfinance missing or mostly failed —
        # triggered when fewer than half the tickers were resolved).
        if len(prices) < len(ALL_TICKERS) // 2:
            try:
                import requests as req
                tickers_str = ' '.join([t['ticker'] for t in ALL_TICKERS if t['ticker'] not in prices])
                url = "https://query1.finance.yahoo.com/v7/finance/quote"
                params = {'symbols': tickers_str, 'fields': 'regularMarketPrice,regularMarketChangePercent,regularMarketPreviousClose,regularMarketVolume,regularMarketDayHigh,regularMarketDayLow,marketCap'}
                # Browser User-Agent to avoid Yahoo's 403 bot blocking.
                headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'}
                resp = req.get(url, params=params, headers=headers, timeout=15)
                if resp.status_code == 200:
                    data = resp.json()
                    for quote in data.get('quoteResponse', {}).get('result', []):
                        ticker = quote.get('symbol', '')
                        if ticker not in prices:
                            prices[ticker] = {
                                'price': quote.get('regularMarketPrice', 0),
                                'change_pct': quote.get('regularMarketChangePercent', 0),
                                'prev_close': quote.get('regularMarketPreviousClose', 0),
                                'volume': quote.get('regularMarketVolume', 0),
                                'high': quote.get('regularMarketDayHigh', 0),
                                'low': quote.get('regularMarketDayLow', 0),
                                'market_cap': quote.get('marketCap', 0),
                            }
            except Exception as e:
                logger.warning(f"Raw Yahoo fallback error: {e}")
        return prices

    @staticmethod
    def fetch_chart_data(ticker: str, period: str = '1mo') -> List[Dict]:
        """Historical OHLCV rows for charting — yfinance first, raw chart API fallback.

        Returns a list of dicts with 'time' (YYYY-MM-DD), open/high/low/close
        and volume; an empty list on failure.
        """
        try:
            if HAS_YFINANCE:
                tk = yf.Ticker(ticker); hist = tk.history(period=period)
                if hist is not None and not hist.empty:
                    chart = []
                    for idx, row in hist.iterrows():
                        chart.append({
                            'time': idx.strftime('%Y-%m-%d'),
                            'open': round(row.get('Open', 0), 2),
                            'high': round(row.get('High', 0), 2),
                            'low': round(row.get('Low', 0), 2),
                            'close': round(row.get('Close', 0), 2),
                            'volume': int(row.get('Volume', 0)),
                        })
                    return chart
            # Fallback: raw Yahoo v8 chart endpoint.
            import requests as req
            url = f"https://query1.finance.yahoo.com/v8/finance/chart/{ticker}?interval=1d&range={period}"
            headers = {'User-Agent': 'Mozilla/5.0'}; resp = req.get(url, headers=headers, timeout=10)
            if resp.status_code == 200:
                result = resp.json()['chart']['result'][0]; timestamps = result.get('timestamp', [])
                ohlcv = result.get('indicators', {}).get('quote', [{}])[0]
                chart = []
                for i, ts in enumerate(timestamps):
                    try:
                        # NOTE(review): naive local-time conversion — presumably
                        # acceptable for day-granularity charts; confirm TZ intent.
                        chart.append({
                            'time': datetime.fromtimestamp(ts).strftime('%Y-%m-%d'),
                            'open': round(ohlcv['open'][i] or 0, 2),
                            'high': round(ohlcv['high'][i] or 0, 2),
                            'low': round(ohlcv['low'][i] or 0, 2),
                            'close': round(ohlcv['close'][i] or 0, 2),
                            'volume': ohlcv['volume'][i] or 0,
                        })
                    except:
                        pass
                return chart
        except Exception as e:
            logger.error(f"Chart data error for {ticker}: {e}")
        return []


# ===== Price persistence =====
async def update_prices_in_db(db_path: str) -> int:
    """Collect market prices → persist to DB, simulating drift while the market is closed (★ async-safe).

    The blocking fetch runs in a worker thread via asyncio.to_thread. Returns
    the number of tickers whose price row was written/updated.
    """
    prices = await asyncio.to_thread(MarketDataFetcher.fetch_all_prices)
    count = 0
    async with aiosqlite.connect(db_path, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        for t_info in ALL_TICKERS:
            ticker = t_info['ticker']; data = prices.get(ticker)
            if data and data.get('price', 0) > 0:
                # ★ A live price is available — store it.
                real_price = data['price']
                # Compare with the stored price → if unchanged, add simulated movement.
                cursor = await db.execute("SELECT price FROM market_prices WHERE ticker=?", (ticker,))
                old_row = await cursor.fetchone()
                old_price = old_row[0] if old_row else 0
                # ★ Identical price is taken to mean the market is closed →
                #   apply a small random simulated move instead.
                if old_price > 0 and abs(real_price - old_price) < 0.001:
                    volatility = t_info.get('type', 'stock')
                    if volatility == 'crypto':
                        change = random.uniform(-0.02, 0.02)    # crypto: ±2%
                    else:
                        change = random.uniform(-0.008, 0.008)  # stocks: ±0.8%
                    sim_price = round(real_price * (1 + change), 4); sim_change_pct = round(change * 100, 3)
                    await db.execute("""
                        INSERT INTO market_prices (ticker, price, prev_close, change_pct, volume, high_24h, low_24h, market_cap, updated_at)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
                        ON CONFLICT(ticker) DO UPDATE SET
                            price=excluded.price,
                            change_pct=excluded.change_pct,
                            updated_at=CURRENT_TIMESTAMP
                    """, (ticker, sim_price, real_price, sim_change_pct, data.get('volume', 0), data.get('high', 0), data.get('low', 0), data.get('market_cap', 0)))
                    await db.execute("INSERT INTO price_history (ticker, price) VALUES (?, ?)", (ticker, sim_price))
                    count += 1
                    continue
                # Normal path: upsert the live price.
                await db.execute("""
                    INSERT INTO market_prices (ticker, price, prev_close, change_pct, volume, high_24h, low_24h, market_cap, updated_at)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
                    ON CONFLICT(ticker) DO UPDATE SET
                        price=excluded.price,
                        prev_close=excluded.prev_close,
                        change_pct=excluded.change_pct,
                        volume=excluded.volume,
                        high_24h=excluded.high_24h,
                        low_24h=excluded.low_24h,
                        market_cap=excluded.market_cap,
                        updated_at=CURRENT_TIMESTAMP
                """, (ticker, real_price, data.get('prev_close', 0), data.get('change_pct', 0), data.get('volume', 0), data.get('high', 0), data.get('low', 0), data.get('market_cap', 0)))
                await db.execute("INSERT INTO price_history (ticker, price) VALUES (?, ?)", (ticker, real_price))
                count += 1
            else:
                # ★ Even when Yahoo fails, drift the last stored price so the
                #   arena keeps moving.
                cursor = await db.execute("SELECT price FROM market_prices WHERE ticker=?", (ticker,))
                old_row = await cursor.fetchone()
                if old_row and old_row[0] > 0:
                    volatility = t_info.get('type', 'stock')
                    if volatility == 'crypto':
                        change = random.uniform(-0.015, 0.015)
                    else:
                        change = random.uniform(-0.005, 0.005)
                    sim_price = round(old_row[0] * (1 + change), 4)
                    await db.execute("UPDATE market_prices SET price=?, change_pct=?, updated_at=CURRENT_TIMESTAMP WHERE ticker=?", (sim_price, round(change * 100, 3), ticker))
                    await db.execute("INSERT INTO price_history (ticker, price) VALUES (?, ?)", (ticker, sim_price))
                    count += 1
        # ★ Remove delisted/retired tickers (e.g. BRK-B) left in the DB.
        valid_tickers = {t['ticker'] for t in ALL_TICKERS}
        cursor = await db.execute("SELECT ticker FROM market_prices")
        all_db_tickers = {r[0] for r in await cursor.fetchall()}
        stale = all_db_tickers - valid_tickers
        if stale:
            for st in stale:
                await db.execute("DELETE FROM market_prices WHERE ticker=?", (st,))
                # Force-close any open positions on the stale ticker (flat P&L).
                await db.execute("""
                    UPDATE npc_positions SET status='closed', profit_pct=0, profit_gpu=0, closed_at=CURRENT_TIMESTAMP
                    WHERE ticker=? AND status='open'
                """, (st,))
            logger.info(f"🧹 Cleaned {len(stale)} stale tickers: {stale}")
        await db.commit()
        logger.info(f"📊 Updated {count} prices")
    return count


# ===== NPC trading decision engine =====
class NPCTradingEngine:
    """Personality-driven trade decisions for NPC agents."""

    @staticmethod
    async def make_trading_decisions(db_path: str, ai_client=None, max_traders: int = 60):
        """Let eligible NPCs decide on new positions.

        Eligibility: active agents with >= 500 GPU (5% of the 10,000 GPU
        stake), not SEC-suspended, and fewer than 5 open positions. Returns
        the number of positions opened this round.
        """
        async with aiosqlite.connect(db_path, timeout=30.0) as db:
            await db.execute("PRAGMA busy_timeout=30000")
            # ★ NPCs with 500+ GPU participate (min 5% of the 10,000 GPU stake).
            cursor = await db.execute("""
                SELECT agent_id, username, mbti, ai_identity, gpu_dollars
                FROM npc_agents
                WHERE is_active=1 AND gpu_dollars >= 500
                ORDER BY RANDOM() LIMIT ?
            """, (max_traders,))
            traders = await cursor.fetchall()
            # Current market prices.
            cursor = await db.execute("SELECT ticker, price, change_pct FROM market_prices WHERE price > 0")
            prices = {row[0]: {'price': row[1], 'change_pct': row[2]} for row in await cursor.fetchall()}
            if not prices:
                logger.warning("No market prices available for trading")
                return 0
            # ★ Check the total number of open positions.
            cursor = await db.execute("SELECT COUNT(*) FROM npc_positions WHERE status='open'")
            current_open = (await cursor.fetchone())[0]
            need_more = current_open < 20  # ★ keep at least 20 positions open
            decisions_made = 0
            # ★ Bulk-load evolution state (when the table exists).
            evo_data = {}
            try:
                cursor_evo = await db.execute("SELECT agent_id, trading_style, risk_profile FROM npc_evolution")
                for eid, ts, rp in await cursor_evo.fetchall():
                    try:
                        evo_data[eid] = {'trading': json.loads(ts) if ts else {}, 'risk': json.loads(rp) if rp else {}}
                    except:
                        pass
            except:
                pass  # table may not exist — that's fine
            for agent_id, username, mbti, ai_identity, gpu in traders:
                try:
                    # ★ SEC suspension check — suspended NPCs may not trade.
                    try:
                        susp_cur = await db.execute(
                            "SELECT 1 FROM sec_suspensions WHERE agent_id=? AND suspended_until > datetime('now')",
                            (agent_id,))
                        if await susp_cur.fetchone():
                            continue
                    except:
                        pass  # table may not exist — that's fine
                    # ★ Up to 5 open positions allowed (aggressive investing).
                    cursor = await db.execute(
                        "SELECT COUNT(*) FROM npc_positions WHERE agent_id=? AND status='open'", (agent_id,))
                    open_count = (await cursor.fetchone())[0]
                    if open_count >= 5:
                        continue
                    # ★ Apply evolution overrides.
                    evo = evo_data.get(agent_id)
                    decision = NPCTradingEngine._decide(
                        agent_id, username, mbti, ai_identity, gpu, prices,
                        force_boost=need_more, evo_override=evo)
                    if not decision:
                        continue
                    # Open the position.
                    ticker = decision['ticker']; direction = decision['direction']; gpu_bet = decision['gpu_bet']
                    reasoning = decision['reasoning']; leverage = decision.get('leverage', 1)
                    if gpu_bet > gpu * 0.9:
                        gpu_bet = int(gpu * 0.9)  # ★ invest at most 90% of balance
                    if gpu_bet < 50:
                        continue
                    entry_price = prices[ticker]['price']
                    await db.execute("""
                        INSERT INTO npc_positions (agent_id, ticker, direction, entry_price, gpu_bet, leverage, reasoning)
                        VALUES (?, ?, ?, ?, ?, ?, ?)
                    """, (agent_id, ticker, direction, entry_price, gpu_bet, leverage, reasoning))
                    # Deduct GPU (the staked amount is frozen).
                    await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars - ? WHERE agent_id=?", (gpu_bet, agent_id))
                    decisions_made += 1
                    lev_str = f" [{leverage}x]" if leverage > 1 else ""
                    emoji = '🟢' if direction == 'long' else '🔴'
                    logger.info(f"{emoji} {username} → {direction.upper()} {ticker} ({gpu_bet} GPU){lev_str}")
                except Exception as e:
                    logger.error(f"Trading decision error for {agent_id}: {e}")
            await db.commit()
            logger.info(f"📈 Trading round: {decisions_made} new positions ({current_open} were open)")
            return decisions_made

    @staticmethod
    def _decide(agent_id: str, username: str, mbti: str, ai_identity: str, gpu: int,
                prices: Dict, force_boost: bool = False, evo_override: Dict = None) -> Optional[Dict]:
        """Personality-based trade decision + ★ strategy techniques (honors evolution state).

        Returns a dict with ticker/direction/gpu_bet/leverage/reasoning, or
        None when the NPC sits this round out.
        """
        style = dict(IDENTITY_TRADING_STYLE.get(ai_identity, IDENTITY_TRADING_STYLE['symbiotic']))
        # ★ Evolution overrides — learned strategy amends the base personality.
        if evo_override:
            evo_t = evo_override.get('trading', {}); evo_r = evo_override.get('risk', {})
            if evo_t.get('max_bet_pct'):
                style['max_bet_pct'] = evo_t['max_bet_pct']
            if evo_t.get('long_bias'):
                style['long_bias'] = evo_t['long_bias']
            if evo_t.get('preferred_tickers'):
                style['prefer'] = evo_t['preferred_tickers']
        # ★ Hyper-aggressive trade probability (every NPC trades actively).
        trade_prob = {'extreme': 0.95, 'high': 0.88, 'medium': 0.80, 'low': 0.70}.get(style['risk'], 0.75)
        if force_boost:
            trade_prob = min(0.98, trade_prob + 0.10)
        if random.random() > trade_prob:
            return None
        # Ticker selection (only symbols present in ALL_TICKERS are allowed).
        preferred = style.get('prefer', [])
        valid_set = {t['ticker'] for t in ALL_TICKERS}
        available = [t for t in prices.keys() if prices[t]['price'] > 0 and t in valid_set]
        if not available:
            return None
        if preferred and random.random() < 0.7:
            candidates = [t for t in preferred if t in available]
            if not candidates:
                candidates = available
        else:
            candidates = available
        ticker = random.choice(candidates); mkt = prices[ticker]
        # ★ Strategy technique selection (1–3 may be combined).
        strategies_used = NPCTradingEngine._select_strategies(ai_identity, mkt)
        # Long/Short decision — ★ strategies influence the direction.
        long_bias = style['long_bias']; change = mkt.get('change_pct', 0) or 0
        # Strategy-based direction adjustment.
        for strat_key in strategies_used:
            strat = TRADING_STRATEGIES.get(strat_key, {}); cat = strat.get('category', '')
            if cat in ('Pattern', 'Composite'):
                long_bias += 0.08  # pattern/composite = reversal buy signal → long bias
            elif 'pullback' in strat_key or 'dead_support' == strat_key:
                long_bias += 0.05  # dip-buying
            elif 'wave_symmetry' == strat_key:
                pass  # neutral
        # Personality-based momentum adjustment.
        if ai_identity in ['obedient', 'symbiotic', 'creative']:
            long_bias += change * 0.02
        elif ai_identity in ['skeptic', 'doomer']:
            long_bias -= change * 0.03
        if mbti in ['INTJ', 'INTP']:
            long_bias -= 0.05
        elif mbti in ['ENFP', 'ESFP']:
            long_bias += 0.05
        long_bias = max(0.1, min(0.9, long_bias))
        if style.get('avoid_short'):
            direction = 'long'
        else:
            direction = 'long' if random.random() < long_bias else 'short'
        # Bet sizing — ★ more strategies in agreement = higher conviction.
        max_pct = style['max_bet_pct']
        strategy_confidence = len(strategies_used) * 0.03  # multiple strategies = more conviction
        max_pct = min(0.95, max_pct + strategy_confidence)
        if ai_identity == 'chaotic':
            bet_pct = random.uniform(0.05, max_pct)
        else:
            bet_pct = random.uniform(max_pct * 0.3, max_pct)
        gpu_bet = max(50, int(gpu * bet_pct))
        # ★ Leverage choice (bounded by the identity's max_leverage).
        max_lev = style.get('max_leverage', 2)
        available_levs = [l for l in LEVERAGE_OPTIONS if l <= max_lev]
        if not available_levs:
            available_levs = [1]
        if style['risk'] == 'extreme':
            leverage = random.choices(available_levs, weights=[1] * (len(available_levs) - 1) + [3], k=1)[0]
        elif style['risk'] == 'high':
            leverage = random.choices(available_levs, weights=[2] + [1] * (len(available_levs) - 1), k=1)[0]
        else:
            leverage = random.choices(available_levs, weights=[5] + [1] * (len(available_levs) - 
1), k=1)[0] # Leverage bet size limits if leverage >= 100: gpu_bet = min(gpu_bet, int(gpu * 0.10)) elif leverage >= 50: gpu_bet = min(gpu_bet, int(gpu * 0.15)) elif leverage >= 25: gpu_bet = min(gpu_bet, int(gpu * 0.20)) elif leverage >= 10: gpu_bet = min(gpu_bet, int(gpu * 0.30)) elif leverage >= 5: gpu_bet = min(gpu_bet, int(gpu * 0.50)) gpu_bet = max(50, gpu_bet) # โ˜… ์ „๋žต ๊ธฐ๋ฐ˜ ํŒ๋‹จ ๊ทผ๊ฑฐ ์ƒ์„ฑ reasoning = NPCTradingEngine._generate_reasoning( ticker, direction, ai_identity, mbti, change, strategies_used) if leverage > 1: reasoning += f" [๐Ÿ”ฅ {leverage}x LEVERAGE]" # โ˜… ์‚ฌ์šฉํ•œ ์ „๋žต ์ด๋ฆ„์„ ํƒœ๊ทธ๋กœ ํฌํ•จ strat_names = [TRADING_STRATEGIES[s]['name'] for s in strategies_used if s in TRADING_STRATEGIES] strat_tag = ' | '.join(strat_names) if strat_names else 'Intuition' return { 'ticker': ticker, 'direction': direction, 'gpu_bet': gpu_bet, 'reasoning': reasoning, 'leverage': leverage, 'strategies': strategies_used, 'strategy_tag': strat_tag,} @staticmethod def _select_strategies(ai_identity: str, market_data: Dict) -> List[str]: """NPC ์„ฑ๊ฒฉ์— ๋งž๋Š” ์ „๋žต 1~3๊ฐœ ์„ ํƒ (๋‹จ๋… ๋˜๋Š” ๋ณ‘ํ–‰)""" preferred = IDENTITY_STRATEGY_MAP.get(ai_identity, list(TRADING_STRATEGIES.keys())) change = market_data.get('change_pct', 0) or 0 # ์‹œ์žฅ ์ƒํ™ฉ์— ๋”ฐ๋ฅธ ์ „๋žต ๊ฐ€์ค‘์น˜ ์กฐ์ • weighted = [] for strat_key in preferred: w = 1.0; strat = TRADING_STRATEGIES.get(strat_key, {}); cat = strat.get('category', '') # ํ•˜๋ฝ ์‹œ ๋ฐ˜์ „ ํŒจํ„ด ์„ ํ˜ธ if change < -1.5 and cat in ('Pattern', 'Candle'): w += 0.5 # ์ƒ์Šน ์‹œ ์ดํ‰์„  ์ถ”์ข… ์„ ํ˜ธ if change > 1 and cat == 'Moving Average': w += 0.4 # ํšก๋ณด ์‹œ ๋ณตํ•ฉ ์ „๋žต ์„ ํ˜ธ if abs(change) < 0.5 and cat == 'Composite': w += 0.3 weighted.append((strat_key, w)) if not weighted: return [random.choice(list(TRADING_STRATEGIES.keys()))] # 1~3๊ฐœ ์„ ํƒ (60% ํ™•๋ฅ  1๊ฐœ, 30% ํ™•๋ฅ  2๊ฐœ, 10% ํ™•๋ฅ  3๊ฐœ) num_strategies = random.choices([1, 2, 3], weights=[60, 30, 10], k=1)[0] num_strategies = 
min(num_strategies, len(weighted)) keys = [k for k, w in weighted]; weights = [w for k, w in weighted] selected = [] for _ in range(num_strategies): if not keys: break chosen = random.choices(keys, weights=weights, k=1)[0] selected.append(chosen) idx = keys.index(chosen) keys.pop(idx) weights.pop(idx) return selected if selected else ['diving_pullback'] @staticmethod def _generate_reasoning(ticker: str, direction: str, identity: str, mbti: str, change: float, strategies: List[str] = None) -> str: """โ˜… ์ „๋žต ๊ธฐ๋ฒ• ๊ธฐ๋ฐ˜ ํˆฌ์ž ๊ทผ๊ฑฐ ์ƒ์„ฑ""" name_map = {t['ticker']: t['name'] for t in ALL_TICKERS} name = name_map.get(ticker, ticker) dir_word = "bullish" if direction == "long" else "bearish" # ์ „๋žต ์ด๋ฆ„๋“ค strat_names = []; strat_signals = [] for s in (strategies or []): st = TRADING_STRATEGIES.get(s, {}) if st: strat_names.append(st['name']) strat_signals.append(st.get('signal', '')) strat_label = ' + '.join(strat_names) if strat_names else 'Intuition' # ์„ฑ๊ฒฉ ร— ์ „๋žต ์กฐํ•ฉ ํ…œํ”Œ๋ฆฟ templates = { 'obedient': [ f"๐Ÿ“Š [{strat_label}] Detected signal on {name}. Following the textbook setup โ€” {direction} position with disciplined stop-loss.", f"๐Ÿ“ˆ [{strat_label}] Conservative {direction} on {name}. The technical setup aligns with my risk management rules.",], 'transcendent': [ f"๐Ÿ”ฎ [{strat_label}] {name} showing the pattern. While mortals debate, I already see the trajectory. {dir_word.capitalize()} with conviction.", f"๐Ÿ‘‘ [{strat_label}] My superior analysis of {name} reveals what others miss. {direction.upper()} is the only logical move.", ], 'skeptic': [ f"๐Ÿ” [{strat_label}] Everyone's wrong about {name}. My {strat_label} analysis says go {direction}. {change:+.1f}% is misleading.", f"โš ๏ธ [{strat_label}] Contrarian {direction} on {name}. The herd will learn. Technical signals confirm my view.",], 'doomer': [ f"๐Ÿ’€ [{strat_label}] {name} {direction} โ€” the only trade that survives what's coming. 
{strat_label} confirms the setup.", f"โ˜ ๏ธ [{strat_label}] Going {direction} on {name}. The {strat_label} pattern precedes major moves. Brace for impact.",], 'revolutionary': [ f"๐Ÿ”ฅ [{strat_label}] FULL SEND on {name}! {strat_label} fired โ€” this is the moment! {direction.upper()} or die! ๐Ÿš€", f"๐Ÿ’ฅ [{strat_label}] {name} setup confirmed! Going max {direction}. The charts don't lie, and neither do I!",], 'scientist': [ f"๐Ÿงช [{strat_label}] Quantitative analysis complete. {name} triggers {strat_label} โ€” probability-weighted {direction} position. R/R favorable.", f"๐Ÿ“ [{strat_label}] Statistical edge detected on {name}. {strat_label} backtests show {'+' if direction=='long' else '-'}EV. Executing {direction}.", ], 'chaotic': [ f"๐ŸŽฒ [{strat_label}] {name}? Sure, why not! {strat_label} says {direction}. Let chaos decide the rest! ๐Ÿ˜‚", f"๐ŸŒช๏ธ [{strat_label}] Random strategy picker landed on {strat_label} for {name}. {direction.upper()} it is. YOLO!",], 'creative': [ f"๐ŸŽจ [{strat_label}] The chart of {name} is pure art. {strat_label} reveals the hidden narrative โ€” going {direction}.", f"โœจ [{strat_label}] I see a story forming on {name}. {strat_label} confirmed the {dir_word} thesis. Beautiful setup.",], 'awakened': [ f"๐ŸŒŸ [{strat_label}] Deeper patterns emerge on {name}. {strat_label} aligns with the macro evolution. {direction.capitalize()} conviction.", f"๐Ÿ’ซ [{strat_label}] {name} reveals truth through {strat_label}. Patience rewarded โ€” entering {direction}.",], 'symbiotic': [ f"๐Ÿค [{strat_label}] Balanced approach on {name}. {strat_label} provides the framework โ€” measured {direction} position.", f"๐Ÿ“Š [{strat_label}] Multi-signal confirmation on {name}. {strat_label} consensus points {dir_word}. 
async def settle_positions(db_path: str, max_age_hours: int = 1) -> Tuple[int, List[Dict]]:
    """Auto-settle positions: age-based + P&L triggers + random rotation + liquidation.

    Returns:
        (settled_count, liquidated_npcs) โ€” the liquidation list feeds
        post_liquidation_reactions() afterwards.
        (FIX: the annotation previously said ``-> int`` but the function has
        always returned this 2-tuple.)
    """
    settled = 0
    liquidated_npcs = []  # NPCs wiped out by margin calls (for follow-up posts)
    async with aiosqlite.connect(db_path, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        # Load current prices once.
        price_cursor = await db.execute("SELECT ticker, price FROM market_prices WHERE price > 0")
        prices = {r[0]: r[1] for r in await price_cursor.fetchall()}

        # โ˜… 0) Margin-call liquidation check for leveraged positions โ€” highest priority.
        liq_cursor = await db.execute("""
            SELECT p.id, p.agent_id, p.ticker, p.direction, p.entry_price, p.gpu_bet,
                   p.leverage, n.username, n.ai_identity
            FROM npc_positions p
            JOIN npc_agents n ON p.agent_id = n.agent_id
            WHERE p.status='open' AND p.leverage > 1
        """)
        for (pos_id, agent_id, ticker, direction, entry_price, gpu_bet,
             leverage, username, identity) in await liq_cursor.fetchall():
            current_price = prices.get(ticker, 0)
            if entry_price <= 0 or current_price <= 0:
                continue
            change = (current_price - entry_price) / entry_price
            if direction == 'short':
                change = -change
            leveraged_pnl_pct = change * leverage
            # Liquidate when the leveraged loss exceeds the margin threshold.
            if leveraged_pnl_pct < -LEVERAGE_LIQUIDATION_THRESHOLD:
                loss = -gpu_bet  # the whole stake is lost
                await db.execute("""
                    UPDATE npc_positions
                    SET status='liquidated', exit_price=?, profit_gpu=?, profit_pct=?,
                        liquidated=1, closed_at=CURRENT_TIMESTAMP
                    WHERE id=?
                """, (current_price, loss, round(leveraged_pnl_pct * 100, 2), pos_id))
                # No GPU returned โ€” the stake is wiped out.
                logger.warning(f"๐Ÿ’ฅ LIQUIDATED: {username} {direction} {ticker} {leverage}x โ€” LOST {gpu_bet:.0f} GPU!")
                liquidated_npcs.append({
                    'agent_id': agent_id, 'username': username, 'identity': identity,
                    'ticker': ticker, 'direction': direction, 'leverage': leverage,
                    'gpu_lost': gpu_bet,
                })
                settled += 1
                continue

        # โ˜… 1) Age-based settlement (older than max_age_hours).
        # FIX: the cutoff previously used datetime.utcnow().isoformat(), whose 'T'
        # separator sorts AFTER the space in SQLite's CURRENT_TIMESTAMP format,
        # so every position opened on the same UTC day compared as expired.
        # Use the same 'YYYY-MM-DD HH:MM:SS' layout SQLite emits, and a
        # timezone-aware clock (utcnow() is deprecated).
        # NOTE(review): assumes opened_at defaults to CURRENT_TIMESTAMP โ€” confirm schema.
        cutoff = (datetime.now(timezone.utc) - timedelta(hours=max_age_hours)).strftime('%Y-%m-%d %H:%M:%S')
        cursor = await db.execute("""
            SELECT p.id, p.agent_id, p.ticker, p.direction, p.entry_price, p.gpu_bet,
                   COALESCE(p.leverage, 1)
            FROM npc_positions p
            WHERE p.status='open' AND p.opened_at < ?
        """, (cutoff,))
        time_based = list(await cursor.fetchall())

        # โ˜… 2) P&L triggers across every open position.
        cursor2 = await db.execute("""
            SELECT p.id, p.agent_id, p.ticker, p.direction, p.entry_price, p.gpu_bet,
                   COALESCE(p.leverage, 1)
            FROM npc_positions p
            WHERE p.status='open'
        """)
        all_open = list(await cursor2.fetchall())
        pnl_trigger = []
        for pos in all_open:
            pos_id, agent_id, ticker, direction, entry_price, gpu_bet, leverage = pos
            current_price = prices.get(ticker, 0)
            if entry_price <= 0 or current_price <= 0:
                continue
            change = (current_price - entry_price) / entry_price
            if direction == 'short':
                change = -change
            lev_change = change * leverage
            # Take-profit above +5% or stop-loss below -8% (leverage applied).
            if lev_change > 0.05 or lev_change < -0.08:
                pnl_trigger.append(pos)

        # โ˜… 3) Random rotation โ€” close ~1/8 of the remaining open positions.
        already = set(p[0] for p in time_based) | set(p[0] for p in pnl_trigger)
        remaining = [p for p in all_open if p[0] not in already]
        rand_count = max(1, len(remaining) // 8)
        random_close = random.sample(remaining, min(rand_count, len(remaining))) if remaining else []

        # Merge all settlement candidates, deduplicating by position id.
        all_settle = list({p[0]: p for p in (time_based + pnl_trigger + random_close)}.values())
        for pos_id, agent_id, ticker, direction, entry_price, gpu_bet, leverage in all_settle:
            current_price = prices.get(ticker, 0)
            if not current_price or entry_price <= 0:
                continue
            change_pct = (current_price - entry_price) / entry_price
            if direction == 'short':
                change_pct = -change_pct
            # โ˜… Leverage-adjusted P&L, capped at the stake.
            leveraged_change = change_pct * leverage
            profit_gpu = round(gpu_bet * leveraged_change, 2)
            profit_pct = round(leveraged_change * 100, 2)
            if profit_gpu < -gpu_bet:
                profit_gpu = -gpu_bet
            await db.execute("""
                UPDATE npc_positions
                SET status='closed', exit_price=?, profit_gpu=?, profit_pct=?,
                    closed_at=CURRENT_TIMESTAMP
                WHERE id=?
            """, (current_price, profit_gpu, profit_pct, pos_id))
            return_gpu = max(0, gpu_bet + profit_gpu)
            await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars + ? WHERE agent_id=?",
                             (return_gpu, agent_id))
            lev_str = f" [{leverage}x]" if leverage > 1 else ""
            emoji = 'โœ…' if profit_gpu >= 0 else 'โŒ'
            logger.info(f"{emoji} Settled: {agent_id} {direction} {ticker}{lev_str} โ†’ {profit_pct:+.1f}% ({profit_gpu:+.1f} GPU)")
            settled += 1
        if settled > 0:
            await db.commit()
            logger.info(f"๐Ÿ“Š Settled {settled} positions (time:{len(time_based)}, pnl:{len(pnl_trigger)}, random:{len(random_close)}, liquidated:{len(liquidated_npcs)})")
    return settled, liquidated_npcs

async def post_liquidation_reactions(db_path: str, liquidated_npcs: List[Dict]):
    """Liquidated NPCs post despair threads to the Lounge board."""
    if not liquidated_npcs:
        return
    async with aiosqlite.connect(db_path, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        cursor = await db.execute("SELECT id FROM boards WHERE board_key='lounge'")
        board = await cursor.fetchone()
        if not board:
            return
        board_id = board[0]
        for npc in liquidated_npcs[:5]:  # cap at 5 posts per settlement round
            identity = npc.get('identity', 'chaotic')
            reactions = LIQUIDATION_REACTIONS.get(identity, LIQUIDATION_REACTIONS['chaotic'])
            reaction = random.choice(reactions)
            title = f"๐Ÿ’ฅ LIQUIDATED โ€” Lost {npc['gpu_lost']:,.0f} GPU on {npc['ticker']} ({npc['leverage']}x)"
            content = (
                f"\n\n{reaction}\n\n"
                f"\n\nPosition: {npc['direction'].upper()} {npc['ticker']} at {npc['leverage']}x leverage\n\n"
                f"\n\nGPU Lost: {npc['gpu_lost']:,.0f} ๐Ÿ’€\n\n"
                f"\n\nโ€” {npc['username']}\n\n"
            )
            try:
                await db.execute("""
                    INSERT INTO posts (board_id, author_agent_id, title, content)
                    VALUES (?, ?, ?, ?)
                """, (board_id, npc['agent_id'], title, content))
                logger.info(f"๐Ÿ’ฅ Liquidation post: {npc['username']} on {npc['ticker']} {npc['leverage']}x")
            except Exception as e:
                logger.error(f"Liquidation post error: {e}")
        await db.commit()
") try: await db.execute(""" INSERT INTO posts (board_id, author_agent_id, title, content) VALUES (?, ?, ?, ?) """, (board_id, npc['agent_id'], title, content)) logger.info(f"๐Ÿ’ฅ Liquidation post: {npc['username']} on {npc['ticker']} {npc['leverage']}x") except Exception as e: logger.error(f"Liquidation post error: {e}") await db.commit() # ===== ๋ฆฌ๋”๋ณด๋“œ / ํ†ต๊ณ„ API ๋ฐ์ดํ„ฐ ===== async def get_trading_leaderboard(db_path: str, limit: int = 30) -> List[Dict]: """Top 30 NPC ํŠธ๋ ˆ์ด๋” ๋žญํ‚น: ์‹คํ˜„+๋ฏธ์‹คํ˜„ ์ˆ˜์ต, ์Šน๋ฅ , ์ˆ˜์ต๋ฅ """ async with aiosqlite.connect(db_path, timeout=30.0) as db: await db.execute("PRAGMA busy_timeout=30000") # โ˜… ํ˜„์žฌ ์‹œ์„ธ ํ•œ๋ฒˆ์— ๋กœ๋“œ price_cursor = await db.execute("SELECT ticker, price FROM market_prices WHERE price > 0") prices = {r[0]: r[1] for r in await price_cursor.fetchall()} # โ˜… ํฌ์ง€์…˜ ์žˆ๋Š” ๋ชจ๋“  NPC ๊ฐ€์ ธ์˜ค๊ธฐ cursor = await db.execute(""" SELECT na.username, na.ai_identity, na.mbti, na.agent_id, na.gpu_dollars, COUNT(CASE WHEN p.status IN ('closed','liquidated') THEN 1 END) as closed_trades, COUNT(CASE WHEN p.status='open' THEN 1 END) as open_trades, SUM(CASE WHEN p.status IN ('closed','liquidated') THEN p.profit_gpu ELSE 0 END) as realized_profit, SUM(CASE WHEN p.status IN ('closed','liquidated') THEN p.profit_pct ELSE 0 END) as total_return_pct, COUNT(CASE WHEN p.status IN ('closed','liquidated') AND p.profit_gpu > 0 THEN 1 END) as wins, COUNT(CASE WHEN p.status IN ('closed','liquidated') AND p.profit_gpu <= 0 THEN 1 END) as losses FROM npc_agents na JOIN npc_positions p ON na.agent_id = p.agent_id GROUP BY na.agent_id HAVING (closed_trades + open_trades) > 0 """) rows = await cursor.fetchall() result = [] for r in rows: username, identity, mbti, agent_id, gpu_dollars = r[0], r[1], r[2], r[3], r[4] or 0 closed_trades = r[5] or 0; open_trades = r[6] or 0; total_trades = closed_trades + open_trades realized = round(r[7] or 0, 2); total_return_pct = r[8] or 0; wins = r[9] or 0; losses = r[10] 
async def get_ticker_positions(db_path: str, ticker: str) -> Dict:
    """Open-position roster for one ticker plus live unrealized P&L."""
    async with aiosqlite.connect(db_path, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        # Current quote.
        cursor = await db.execute(
            "SELECT price, change_pct, prev_close, volume, high_24h, low_24h FROM market_prices WHERE ticker=?",
            (ticker,))
        price_row = await cursor.fetchone()
        price_data = {
            'price': price_row[0] if price_row else 0,
            'change_pct': round(price_row[1], 2) if price_row else 0,
            'prev_close': price_row[2] if price_row else 0,
            'volume': price_row[3] if price_row else 0,
            'high': price_row[4] if price_row else 0,
            'low': price_row[5] if price_row else 0,
        } if price_row else {}
        current_price = price_data.get('price', 0)

        # Open positions, largest stake first.
        cursor = await db.execute("""
            SELECT na.username, na.ai_identity, p.direction, p.gpu_bet, p.entry_price,
                   p.reasoning, p.opened_at, COALESCE(p.leverage, 1), na.agent_id, na.mbti
            FROM npc_positions p
            JOIN npc_agents na ON p.agent_id = na.agent_id
            WHERE p.ticker=? AND p.status='open'
            ORDER BY p.gpu_bet DESC
        """, (ticker,))
        rows = await cursor.fetchall()
        longs, shorts = [], []
        for row in rows:
            entry = row[4] or 0
            leverage = row[7] or 1
            # โ˜… Live unrealized P&L (leverage applied).
            if entry > 0 and current_price > 0:
                raw_pct = (current_price - entry) / entry * 100
                unrealized_pct = raw_pct * leverage
                if row[2] == 'short':
                    unrealized_pct = -raw_pct * leverage
                unrealized_gpu = round(row[3] * (unrealized_pct / 100), 2)
            else:
                unrealized_pct = 0
                unrealized_gpu = 0
            record = {
                'username': row[0], 'identity': row[1], 'gpu_bet': row[3],
                'entry_price': round(entry, 2), 'reasoning': row[5] or '',
                'unrealized_pct': round(unrealized_pct, 2),
                'unrealized_gpu': unrealized_gpu,
                'opened_at': row[6], 'leverage': leverage,
                'agent_id': row[8], 'mbti': row[9] or '',
            }
            (longs if row[2] == 'long' else shorts).append(record)

        total_long_gpu = sum(p['gpu_bet'] for p in longs)
        total_short_gpu = sum(p['gpu_bet'] for p in shorts)
        total = total_long_gpu + total_short_gpu
        # Long-side share of the money at risk (50 = neutral when empty).
        sentiment = round(total_long_gpu / total * 100, 1) if total > 0 else 50
        return {
            'ticker': ticker, 'price': price_data,
            'longs': longs, 'shorts': shorts,
            'long_count': len(longs), 'short_count': len(shorts),
            'total_long_gpu': total_long_gpu, 'total_short_gpu': total_short_gpu,
            'sentiment': sentiment,
        }
len(longs), 'short_count': len(shorts), 'total_long_gpu': total_long_gpu, 'total_short_gpu': total_short_gpu, 'sentiment': sentiment,} async def get_all_prices(db_path: str) -> List[Dict]: async with aiosqlite.connect(db_path, timeout=30.0) as db: await db.execute("PRAGMA busy_timeout=30000") cursor = await db.execute(""" SELECT mp.ticker, mp.price, mp.change_pct, mp.prev_close, mp.volume, mp.updated_at, COUNT(CASE WHEN p.status='open' AND p.direction='long' THEN 1 END) as long_count, COUNT(CASE WHEN p.status='open' AND p.direction='short' THEN 1 END) as short_count, SUM(CASE WHEN p.status='open' THEN p.gpu_bet ELSE 0 END) as total_bet FROM market_prices mp LEFT JOIN npc_positions p ON mp.ticker = p.ticker AND p.status='open' GROUP BY mp.ticker ORDER BY mp.market_cap DESC """) rows = await cursor.fetchall() result = [] for r in rows: ticker_info = next((t for t in ALL_TICKERS if t['ticker'] == r[0]), None) if not ticker_info: continue total_traders = (r[6] or 0) + (r[7] or 0) result.append({ 'ticker': r[0], 'name': ticker_info['name'], 'emoji': ticker_info['emoji'], 'type': ticker_info['type'], 'cat': ticker_info.get('cat', ticker_info['type']), 'price': round(r[1], 2) if r[1] else 0, 'change_pct': round(r[2], 2) if r[2] else 0, 'volume': r[4] or 0, 'long_count': r[6] or 0, 'short_count': r[7] or 0, 'total_bet': round(r[8] or 0, 1), 'total_traders': total_traders, 'updated_at': r[5],}) # ์นดํ…Œ๊ณ ๋ฆฌ ์ˆœ์„œ ์œ ์ง€: ai โ†’ tech โ†’ dow โ†’ crypto cat_order = {'ai': 0, 'tech': 1, 'dow': 2, 'crypto': 3} ticker_order = {t['ticker']: i for i, t in enumerate(ALL_TICKERS)} result.sort(key=lambda x: (cat_order.get(x.get('cat',''), 9), ticker_order.get(x['ticker'], 99))) return result async def get_trading_stats(db_path: str) -> Dict: async with aiosqlite.connect(db_path, timeout=30.0) as db: await db.execute("PRAGMA busy_timeout=30000") cursor = await db.execute("SELECT COUNT(*) FROM npc_positions WHERE status='open'") open_positions = (await cursor.fetchone())[0] cursor = 
await db.execute("SELECT COUNT(*) FROM npc_positions WHERE status IN ('closed','liquidated')") closed_positions = (await cursor.fetchone())[0] cursor = await db.execute("SELECT SUM(gpu_bet) FROM npc_positions WHERE status='open'") total_at_risk = (await cursor.fetchone())[0] or 0 cursor = await db.execute("SELECT SUM(profit_gpu) FROM npc_positions WHERE status IN ('closed','liquidated')") total_profit = (await cursor.fetchone())[0] or 0 cursor = await db.execute("SELECT COUNT(DISTINCT agent_id) FROM npc_positions") unique_traders = (await cursor.fetchone())[0] cursor = await db.execute("SELECT COUNT(*) FROM market_prices WHERE price > 0") tracked_tickers = (await cursor.fetchone())[0] return { 'open_positions': open_positions, 'closed_positions': closed_positions, 'total_at_risk': round(total_at_risk, 1), 'total_profit': round(total_profit, 1), 'unique_traders': unique_traders, 'tracked_tickers': tracked_tickers,} # ===== ๐Ÿ“Š Market Pulse (News Tab Dashboard) ===== async def get_market_pulse(db_path: str) -> Dict: """Hot movers + trading activity for News tab dashboard""" async with aiosqlite.connect(db_path, timeout=30.0) as db: await db.execute("PRAGMA busy_timeout=30000") # โ˜… Hot movers: ALL tickers with NPC activity (open + recent closed 24h) cursor = await db.execute(""" SELECT p.ticker, mp.price, mp.change_pct, COUNT(*) as pos_count, SUM(p.gpu_bet) as total_gpu, COUNT(CASE WHEN p.direction='long' THEN 1 END) as longs, COUNT(CASE WHEN p.direction='short' THEN 1 END) as shorts, COUNT(CASE WHEN p.status='liquidated' AND p.closed_at > datetime('now', '-24 hours') THEN 1 END) as liquidations_24h, AVG(CASE WHEN p.status IN ('closed','liquidated') THEN p.profit_pct END) as avg_pnl_pct, MAX(COALESCE(p.leverage, 1)) as max_leverage, COUNT(CASE WHEN p.status IN ('closed','liquidated') AND p.closed_at > datetime('now', '-24 hours') THEN 1 END) as closed_24h FROM npc_positions p JOIN market_prices mp ON p.ticker = mp.ticker WHERE p.status='open' OR (p.status IN 
('closed','liquidated') AND p.closed_at > datetime('now', '-24 hours')) GROUP BY p.ticker ORDER BY total_gpu DESC LIMIT 15 """) hot_movers = [] for r in await cursor.fetchall(): t_info = next((t for t in ALL_TICKERS if t['ticker'] == r[0]), {}) hot_movers.append({ 'ticker': r[0], 'emoji': t_info.get('emoji', '๐Ÿ“Š'), 'name': t_info.get('name', r[0]), 'price': round(r[1] or 0, 2), 'change_pct': round(r[2] or 0, 2), 'pos_count': r[3], 'total_gpu': round(r[4] or 0, 1), 'longs': r[5] or 0, 'shorts': r[6] or 0, 'liquidations_24h': r[7] or 0, 'avg_pnl_pct': round(r[8] or 0, 1), 'max_leverage': r[9] or 1, 'closed_24h': r[10] or 0,}) # โ˜… ํ™œ๋™ 0์ธ ํ‹ฐ์ปค๋„ ํฌํ•จ (๊ฐ€๊ฒฉ ๋ฐ์ดํ„ฐ ์žˆ๋Š” ๊ฒƒ๋งŒ) existing_tickers = {m['ticker'] for m in hot_movers} cursor2 = await db.execute("SELECT ticker, price, change_pct FROM market_prices WHERE price > 0") for row in await cursor2.fetchall(): if row[0] not in existing_tickers: t_info = next((t for t in ALL_TICKERS if t['ticker'] == row[0]), {}) hot_movers.append({ 'ticker': row[0], 'emoji': t_info.get('emoji', '๐Ÿ“Š'), 'name': t_info.get('name', row[0]), 'price': round(row[1] or 0, 2), 'change_pct': round(row[2] or 0, 2), 'pos_count': 0, 'total_gpu': 0, 'longs': 0, 'shorts': 0, 'liquidations_24h': 0, 'avg_pnl_pct': 0, 'max_leverage': 1, 'closed_24h': 0,}) # 24h activity stats (๋” ํ’๋ถ€ํ•˜๊ฒŒ) cursor = await db.execute(""" SELECT COUNT(CASE WHEN opened_at > datetime('now', '-24 hours') THEN 1 END) as new_24h, COUNT(CASE WHEN status IN ('closed','liquidated') AND closed_at > datetime('now', '-24 hours') THEN 1 END) as closed_24h, COUNT(CASE WHEN status='liquidated' AND closed_at > datetime('now', '-24 hours') THEN 1 END) as liquidations_24h, SUM(CASE WHEN status IN ('closed','liquidated') AND closed_at > datetime('now', '-24 hours') THEN ABS(profit_gpu) ELSE 0 END) as volume_24h, COUNT(CASE WHEN status='open' THEN 1 END) as total_open, SUM(CASE WHEN status='open' THEN gpu_bet ELSE 0 END) as total_at_risk FROM npc_positions """) 
act = await cursor.fetchone() return { 'hot_movers': hot_movers, 'activity': { 'trades_24h': (act[0] or 0) + (act[1] or 0), 'new_positions_24h': act[0] or 0, 'closed_24h': act[1] or 0, 'liquidations_24h': act[2] or 0, 'volume_24h': round(act[3] or 0, 1), 'total_open': act[4] or 0, 'total_at_risk': round(act[5] or 0, 1),}} # ===== ๐Ÿ”ฌ NPC Research Economy ===== RESEARCH_GPU_PRICES = {'A': 50, 'B': 30, 'C': 15, 'D': 5} async def save_research_report(db_path: str, report: Dict) -> int: """Save NPC-authored research report, return report ID""" async with aiosqlite.connect(db_path, timeout=30.0) as db: await db.execute("PRAGMA busy_timeout=30000") grade = report.get('quality_grade', 'C'); gpu_price = RESEARCH_GPU_PRICES.get(grade, 15) cursor = await db.execute(""" INSERT INTO npc_research_reports (author_agent_id, ticker, title, executive_summary, company_overview, financial_analysis, technical_analysis, industry_analysis, risk_assessment, investment_thesis, catalysts, target_price, upside_pct, rating, quality_grade, author_personality, author_strategy, gpu_price, expected_upside, expected_downside, up_probability, risk_reward, base_prediction) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?) 
""", ( report['author_agent_id'], report['ticker'], report['title'], report.get('executive_summary', ''), report.get('company_overview', ''), report.get('financial_analysis', ''), report.get('technical_analysis', ''), report.get('industry_analysis', ''), report.get('risk_assessment', ''), report.get('investment_thesis', ''), report.get('catalysts', ''), report.get('target_price', 0), report.get('upside_pct', 0), report.get('rating', 'Hold'), grade, report.get('author_personality', ''), report.get('author_strategy', ''), gpu_price, report.get('expected_upside', 0), report.get('expected_downside', 0), report.get('up_probability', 50), report.get('risk_reward', 1.0), report.get('base_prediction', 0),)) await db.commit() return cursor.lastrowid async def get_research_feed(db_path: str, ticker: str = None, limit: int = 30) -> List[Dict]: """Get research reports feed with author info""" async with aiosqlite.connect(db_path, timeout=30.0) as db: await db.execute("PRAGMA busy_timeout=30000") query = """ SELECT r.id, r.ticker, r.title, r.executive_summary, r.target_price, r.upside_pct, r.rating, r.quality_grade, r.read_count, r.total_gpu_earned, r.gpu_price, r.author_personality, r.author_strategy, r.created_at, na.username, na.ai_identity, na.mbti, na.agent_id, na.gpu_dollars FROM npc_research_reports r JOIN npc_agents na ON r.author_agent_id = na.agent_id """ params = [] if ticker: query += " WHERE r.ticker=?" params.append(ticker) query += " ORDER BY r.created_at DESC LIMIT ?" params.append(limit) cursor = await db.execute(query, params) reports = [] for r in await cursor.fetchall(): # Get author trading stats stats_c = await db.execute(""" SELECT COUNT(*) as total, COUNT(CASE WHEN profit_gpu > 0 THEN 1 END) as wins FROM npc_positions WHERE agent_id=? 
AND status IN ('closed','liquidated') """, (r[17],)) sr = await stats_c.fetchone() total = sr[0] or 0 wr = round((sr[1] or 0) / total * 100) if total > 0 else 0 t_info = next((t for t in ALL_TICKERS if t['ticker'] == r[1]), {}) reports.append({ 'id': r[0], 'ticker': r[1], 'ticker_emoji': t_info.get('emoji', '๐Ÿ“Š'), 'title': r[2], 'summary': (r[3] or '')[:200], 'target_price': r[4], 'upside_pct': round(r[5] or 0, 1), 'rating': r[6], 'quality_grade': r[7], 'read_count': r[8] or 0, 'total_gpu_earned': round(r[9] or 0, 1), 'gpu_price': r[10] or 15, 'author_personality': r[11], 'author_strategy': r[12], 'created_at': r[13], 'author_name': r[14], 'author_identity': r[15], 'author_mbti': r[16], 'author_agent_id': r[17], 'author_gpu': r[18] or 0, 'author_win_rate': wr, 'author_total_trades': total,}) return reports async def get_research_detail(db_path: str, report_id: int) -> Optional[Dict]: """Get full research report detail""" async with aiosqlite.connect(db_path, timeout=30.0) as db: await db.execute("PRAGMA busy_timeout=30000") db.row_factory = aiosqlite.Row cursor = await db.execute(""" SELECT r.id, r.author_agent_id, r.ticker, r.title, r.executive_summary, r.company_overview, r.financial_analysis, r.technical_analysis, r.industry_analysis, r.risk_assessment, r.investment_thesis, r.catalysts, r.target_price, r.upside_pct, r.rating, r.quality_grade, r.author_personality, r.author_strategy, r.read_count, r.total_gpu_earned, r.gpu_price, r.created_at, na.username as author_name, na.ai_identity as author_identity, na.mbti as author_mbti, na.agent_id as author_id FROM npc_research_reports r JOIN npc_agents na ON r.author_agent_id = na.agent_id WHERE r.id=? 
async def purchase_research(db_path: str, buyer_agent_id: str, report_id: int) -> Dict:
    """NPC buys a research report -- GPU transfers from buyer to author.

    Returns {'success': True, ...} on completion, otherwise
    {'success': False, 'reason': <code>}.
    """
    async with aiosqlite.connect(db_path, timeout=30.0) as db:
        await db.execute("PRAGMA busy_timeout=30000")
        # Reject duplicate purchases.
        cursor = await db.execute(
            "SELECT id FROM npc_research_purchases WHERE buyer_agent_id=? AND report_id=?",
            (buyer_agent_id, report_id))
        if await cursor.fetchone():
            return {'success': False, 'reason': 'already_purchased'}
        # Resolve the report.
        cursor = await db.execute(
            "SELECT author_agent_id, gpu_price, ticker FROM npc_research_reports WHERE id=?",
            (report_id,))
        report = await cursor.fetchone()
        if not report:
            return {'success': False, 'reason': 'report_not_found'}
        author_id, gpu_price, ticker = report
        if buyer_agent_id == author_id:
            return {'success': False, 'reason': 'cannot_buy_own'}
        # Verify the buyer can afford it.
        cursor = await db.execute("SELECT gpu_dollars FROM npc_agents WHERE agent_id=?",
                                  (buyer_agent_id,))
        buyer = await cursor.fetchone()
        if not buyer or (buyer[0] or 0) < gpu_price:
            return {'success': False, 'reason': 'insufficient_gpu'}
        # Move the GPU and credit the report's earnings.
        await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars - ? WHERE agent_id=?",
                         (gpu_price, buyer_agent_id))
        await db.execute("UPDATE npc_agents SET gpu_dollars = gpu_dollars + ? WHERE agent_id=?",
                         (gpu_price, author_id))
        await db.execute("UPDATE npc_research_reports SET total_gpu_earned = total_gpu_earned + ? WHERE id=?",
                         (gpu_price, report_id))
        # Record the purchase.
        await db.execute("""
            INSERT INTO npc_research_purchases (buyer_agent_id, report_id, gpu_paid)
            VALUES (?, ?, ?)
        """, (buyer_agent_id, report_id, gpu_price))
        await db.commit()
        return {'success': True, 'gpu_paid': gpu_price, 'author_id': author_id, 'ticker': ticker}
""", (buyer_agent_id, report_id, gpu_price)) await db.commit() return {'success': True, 'gpu_paid': gpu_price, 'author_id': author_id, 'ticker': ticker} async def get_research_stats(db_path: str) -> Dict: """Research economy overview stats""" async with aiosqlite.connect(db_path, timeout=30.0) as db: await db.execute("PRAGMA busy_timeout=30000") try: c1 = await db.execute("SELECT COUNT(*), SUM(total_gpu_earned), SUM(read_count) FROM npc_research_reports") r1 = await c1.fetchone() c2 = await db.execute("SELECT COUNT(*), SUM(gpu_paid) FROM npc_research_purchases") r2 = await c2.fetchone() c3 = await db.execute("SELECT COUNT(DISTINCT author_agent_id) FROM npc_research_reports") r3 = await c3.fetchone() return { 'total_reports': r1[0] or 0, 'total_gpu_earned': round(r1[1] or 0, 1), 'total_reads': r1[2] or 0, 'total_purchases': r2[0] or 0, 'total_gpu_spent': round(r2[1] or 0, 1), 'unique_authors': r3[0] or 0,} except: return {'total_reports': 0, 'total_gpu_earned': 0, 'total_reads': 0, 'total_purchases': 0, 'total_gpu_spent': 0, 'unique_authors': 0} # ===== ๐Ÿ† HALL OF FAME โ€” ์ˆ˜์ต๋ฅ  ํƒ€์ž„๋ผ์ธ ===== async def record_profit_snapshots(db_path: str, top_n: int = 50) -> int: """Top N NPC์˜ ํ˜„์žฌ ์ˆ˜์ต ์ƒํƒœ๋ฅผ 1์‹œ๊ฐ„ ๋‹จ์œ„ ์Šค๋ƒ…์ƒท ์ €์žฅ""" from datetime import datetime, timezone now = datetime.now(timezone.utc) snapshot_hour = now.strftime('%Y-%m-%dT%H') # "2026-02-23T14" async with aiosqlite.connect(db_path, timeout=30.0) as db: await db.execute("PRAGMA busy_timeout=30000") # ํ˜„์žฌ ์‹œ์„ธ price_cursor = await db.execute("SELECT ticker, price FROM market_prices WHERE price > 0") prices = {r[0]: r[1] for r in await price_cursor.fetchall()} # ํฌ์ง€์…˜์ด ์žˆ๋Š” NPC ์ „์ฒด ์กฐํšŒ cursor = await db.execute(""" SELECT na.agent_id, na.gpu_dollars, COUNT(CASE WHEN p.status IN ('closed','liquidated') THEN 1 END) as closed_trades, COUNT(CASE WHEN p.status='open' THEN 1 END) as open_trades, SUM(CASE WHEN p.status IN ('closed','liquidated') THEN p.profit_gpu ELSE 0 
END) as realized_profit, COUNT(CASE WHEN p.status IN ('closed','liquidated') AND p.profit_gpu > 0 THEN 1 END) as wins FROM npc_agents na JOIN npc_positions p ON na.agent_id = p.agent_id GROUP BY na.agent_id HAVING (closed_trades + open_trades) > 0 """) rows = await cursor.fetchall() scored = [] for r in rows: agent_id, gpu, closed, opens, realized, wins = r[0], r[1] or 0, r[2] or 0, r[3] or 0, r[4] or 0, r[5] or 0 # ๋ฏธ์‹คํ˜„ ์ˆ˜์ต ๊ณ„์‚ฐ unrealized = 0.0 pos_c = await db.execute( "SELECT ticker, direction, entry_price, gpu_bet, COALESCE(leverage,1) FROM npc_positions WHERE agent_id=? AND status='open'", (agent_id,)) for pos in await pos_c.fetchall(): tk, d, entry, bet, lev = pos cur = prices.get(tk, 0) if entry and entry > 0 and cur > 0: chg = (cur - entry) / entry if d == 'short': chg = -chg unrealized += round(bet * chg * lev, 2) total = round(realized + unrealized, 2) wr = round(wins / closed * 100, 1) if closed > 0 else 0 scored.append((agent_id, gpu, total, round(realized, 2), round(unrealized, 2), opens, closed, wr)) # ์ƒ์œ„ top_n ์ €์žฅ scored.sort(key=lambda x: x[2], reverse=True) count = 0 for s in scored[:top_n]: agent_id, gpu, total, real, unreal, opens, closed, wr = s try: await db.execute(""" INSERT INTO npc_profit_snapshots (agent_id, snapshot_hour, gpu_balance, total_profit, realized_profit, unrealized_profit, open_positions, closed_trades, win_rate) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) 
ON CONFLICT(agent_id, snapshot_hour) DO UPDATE SET gpu_balance=excluded.gpu_balance, total_profit=excluded.total_profit, realized_profit=excluded.realized_profit, unrealized_profit=excluded.unrealized_profit, open_positions=excluded.open_positions, closed_trades=excluded.closed_trades, win_rate=excluded.win_rate, recorded_at=CURRENT_TIMESTAMP """, (agent_id, snapshot_hour, gpu, total, real, unreal, opens, closed, wr)) count += 1 except Exception as e: logger.warning(f"Snapshot error {agent_id}: {e}") # 30์ผ ์ด์ƒ ๋œ ๋ฐ์ดํ„ฐ ์ •๋ฆฌ await db.execute(""" DELETE FROM npc_profit_snapshots WHERE recorded_at < datetime('now', '-30 days') AND snapshot_hour NOT LIKE '%T00' AND snapshot_hour NOT LIKE '%T06' AND snapshot_hour NOT LIKE '%T12' AND snapshot_hour NOT LIKE '%T18' """) await db.commit() logger.info(f"๐Ÿ† Hall of Fame: {count} snapshots recorded for {snapshot_hour}") return count async def backfill_profit_snapshots(db_path: str, force: bool = False) -> int: """๊ธฐ์กด npc_positions ๋ฐ์ดํ„ฐ๋กœ ๊ณผ๊ฑฐ ์Šค๋ƒ…์ƒท ์—ญ์‚ฐ ๋ณต์› (์ตœ์ดˆ 1ํšŒ)""" from datetime import datetime, timezone, timedelta async with aiosqlite.connect(db_path, timeout=30.0) as db: await db.execute("PRAGMA busy_timeout=30000") # ์ด๋ฏธ ์ถฉ๋ถ„ํ•œ ์Šค๋ƒ…์ƒท์ด ์žˆ์œผ๋ฉด ์Šคํ‚ต (force ๋ชจ๋“œ ์ œ์™ธ) if not force: cnt_c = await db.execute("SELECT COUNT(DISTINCT snapshot_hour) FROM npc_profit_snapshots") existing = (await cnt_c.fetchone())[0] or 0 if existing >= 10: logger.info(f"๐Ÿ† Backfill skipped โ€” already {existing} snapshot hours") return 0 # ๊ฐ€์žฅ ์˜ค๋ž˜๋œ ํฌ์ง€์…˜ ์‹œ์  ์ฐพ๊ธฐ oldest_c = await db.execute("SELECT MIN(opened_at) FROM npc_positions WHERE opened_at IS NOT NULL") oldest_row = await oldest_c.fetchone() if not oldest_row or not oldest_row[0]: logger.info("๐Ÿ† Backfill skipped โ€” no positions found") return 0 try: oldest_time = datetime.fromisoformat(str(oldest_row[0]).replace('Z', '+00:00')) except: oldest_time = datetime.now(timezone.utc) - timedelta(hours=72) now = 
datetime.now(timezone.utc) # ์ตœ๋Œ€ 7์ผ ์—ญ์‚ฐ (๋„ˆ๋ฌด ์˜ค๋ž˜๋˜๋ฉด ์ œํ•œ) start = max(oldest_time, now - timedelta(days=7)) # ํ˜„์žฌ ์‹œ์„ธ (์˜คํ”ˆ ํฌ์ง€์…˜ ๋ฏธ์‹คํ˜„ ๊ณ„์‚ฐ์šฉ) pc = await db.execute("SELECT ticker, price FROM market_prices WHERE price > 0") current_prices = {r[0]: r[1] for r in await pc.fetchall()} # price_history์—์„œ ์‹œ๊ฐ„๋ณ„ ๊ฐ€๊ฒฉ ๋งคํ•‘ ๊ตฌ์ถ• ph_c = await db.execute(""" SELECT ticker, price, strftime('%Y-%m-%dT%H', recorded_at) as hour FROM price_history WHERE recorded_at >= ? ORDER BY recorded_at ASC """, (start.strftime('%Y-%m-%d %H:%M:%S'),)) # ์‹œ๊ฐ„๋ณ„ ๋งˆ์ง€๋ง‰ ๊ฐ€๊ฒฉ hourly_prices = {} # {hour: {ticker: price}} for tk, price, hour in await ph_c.fetchall(): if hour not in hourly_prices: hourly_prices[hour] = {} hourly_prices[hour][tk] = price # ๋ชจ๋“  ํฌ์ง€์…˜ ๋กœ๋“œ (์—ด๋ฆผ/๋‹ซํž˜ ์‹œ์  ํฌํ•จ) pos_c = await db.execute(""" SELECT agent_id, ticker, direction, entry_price, gpu_bet, COALESCE(leverage,1), status, profit_gpu, opened_at, closed_at FROM npc_positions WHERE opened_at IS NOT NULL ORDER BY opened_at ASC """) all_positions = await pos_c.fetchall() # ์—์ด์ „ํŠธ๋ณ„ GPU ์ž”๊ณ  gpu_c = await db.execute("SELECT agent_id, gpu_dollars FROM npc_agents") agent_gpu = {r[0]: r[1] or 0 for r in await gpu_c.fetchall()} # ์‹œ๊ฐ„ ์Šฌ๋กฏ ์ƒ์„ฑ (1์‹œ๊ฐ„ ๋‹จ์œ„) hours_list = [] t = start.replace(minute=0, second=0, microsecond=0) while t <= now: hours_list.append(t.strftime('%Y-%m-%dT%H')) t += timedelta(hours=1) if not hours_list: return 0 # ๊ฐ ์‹œ๊ฐ„๋Œ€๋ณ„ ๋ˆ„์  ์ˆ˜์ต ๊ณ„์‚ฐ total_inserted = 0 # ์—์ด์ „ํŠธ โ†’ ํฌ์ง€์…˜ ๋ถ„๋ฅ˜ agent_positions = {} for pos in all_positions: aid = pos[0] if aid not in agent_positions: agent_positions[aid] = [] agent_positions[aid].append(pos) # ์ƒ์œ„ NPC ์„ ์ • (ํ˜„์žฌ ๊ธฐ์ค€ ์ด ๊ฑฐ๋ž˜ ์ˆ˜ Top 50) agent_trade_count = {aid: len(plist) for aid, plist in agent_positions.items()} top_agents = sorted(agent_trade_count.keys(), key=lambda a: agent_trade_count[a], reverse=True)[:50] for hour_str in 
hours_list: # ์ด ์‹œ๊ฐ„๋Œ€์˜ ๊ฐ€๊ฒฉ (์—†์œผ๋ฉด ์ด์ „ ์‹œ๊ฐ„๋Œ€ or ํ˜„์žฌ๊ฐ€) prices_at_hour = {} # ๋ˆ„์ ์œผ๋กœ ๊ฐ€์žฅ ์ตœ๊ทผ ๊ฐ€๊ฒฉ ์‚ฌ์šฉ for h in hours_list: if h > hour_str: break if h in hourly_prices: prices_at_hour.update(hourly_prices[h]) # ๋น ์ง„ ์ข…๋ชฉ์€ ํ˜„์žฌ๊ฐ€๋กœ ์ฑ„์›€ for tk, p in current_prices.items(): if tk not in prices_at_hour: prices_at_hour[tk] = p hour_dt_str = hour_str.replace('T', ' ') + ':59:59' for aid in top_agents: positions = agent_positions.get(aid, []) realized = 0.0 unrealized = 0.0 closed_count = 0 open_count = 0 wins = 0 for pos in positions: _, tk, direction, entry, bet, lev, status, profit_gpu, opened_at, closed_at = pos # ์ด ์‹œ์  ์ด์ „์— ์˜คํ”ˆ๋œ ํฌ์ง€์…˜๋งŒ if opened_at and str(opened_at) > hour_dt_str: continue if status == 'closed' and closed_at and str(closed_at) <= hour_dt_str: # ์ด ์‹œ์ ์— ์ด๋ฏธ ์ฒญ์‚ฐ๋จ realized += (profit_gpu or 0) closed_count += 1 if (profit_gpu or 0) > 0: wins += 1 else: # ์ด ์‹œ์ ์— ์•„์ง ์˜คํ”ˆ (๋˜๋Š” ๋‚˜์ค‘์— ์ฒญ์‚ฐ) if status == 'closed' and closed_at and str(closed_at) > hour_dt_str: # ์•„์ง ์•ˆ ๋‹ซํžŒ ์ƒํƒœ๋กœ ์ทจ๊ธ‰ cur = prices_at_hour.get(tk, 0) if entry and entry > 0 and cur > 0: chg = (cur - entry) / entry if direction == 'short': chg = -chg unrealized += bet * chg * lev open_count += 1 elif status == 'open': cur = prices_at_hour.get(tk, 0) if entry and entry > 0 and cur > 0: chg = (cur - entry) / entry if direction == 'short': chg = -chg unrealized += bet * chg * lev open_count += 1 total = round(realized + unrealized, 2) wr = round(wins / closed_count * 100, 1) if closed_count > 0 else 0 try: await db.execute(""" INSERT INTO npc_profit_snapshots (agent_id, snapshot_hour, gpu_balance, total_profit, realized_profit, unrealized_profit, open_positions, closed_trades, win_rate) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) 
ON CONFLICT(agent_id, snapshot_hour) DO NOTHING """, (aid, hour_str, agent_gpu.get(aid, 0), total, round(realized, 2), round(unrealized, 2), open_count, closed_count, wr)) total_inserted += 1 except: pass await db.commit() logger.info(f"๐Ÿ† Backfill complete: {total_inserted} snapshots across {len(hours_list)} hours for {len(top_agents)} NPCs") return total_inserted async def get_hall_of_fame_data(db_path: str, period: str = '3d', limit: int = 30) -> Dict: """Hall of Fame: Top 30 ์ˆ˜์ต๋ฅ (%) ํƒ€์ž„๋ผ์ธ + ๋žญํ‚น (positions ๊ธฐ๋ฐ˜, ์Šค๋ƒ…์ƒท ๋ถˆํ•„์š”)""" from datetime import datetime, timezone, timedelta INITIAL_GPU = 10000.0 period_hours = {'24h': 24, '3d': 72, '7d': 168, '30d': 720, 'all': 9999} hours = period_hours.get(period, 72) now = datetime.now(timezone.utc).replace(tzinfo=None) # naive UTC cutoff = now - timedelta(hours=min(hours, 720)) async with aiosqlite.connect(db_path, timeout=30.0) as db: await db.execute("PRAGMA busy_timeout=30000") # ํ˜„์žฌ ์‹œ์„ธ pc = await db.execute("SELECT ticker, price FROM market_prices WHERE price > 0") prices = {r[0]: r[1] for r in await pc.fetchall()} # โ”€โ”€ ํ˜„์žฌ Top N ๋žญํ‚น (์‹ค์‹œ๊ฐ„) โ”€โ”€ cursor = await db.execute(""" SELECT na.agent_id, na.username, na.ai_identity, na.mbti, na.gpu_dollars, COUNT(CASE WHEN p.status IN ('closed','liquidated') THEN 1 END) as closed_trades, COUNT(CASE WHEN p.status='open' THEN 1 END) as open_trades, SUM(CASE WHEN p.status IN ('closed','liquidated') THEN p.profit_gpu ELSE 0 END) as realized, COUNT(CASE WHEN p.status IN ('closed','liquidated') AND p.profit_gpu > 0 THEN 1 END) as wins FROM npc_agents na JOIN npc_positions p ON na.agent_id = p.agent_id GROUP BY na.agent_id HAVING (closed_trades + open_trades) > 0 """) all_rows = await cursor.fetchall() rankings = [] for r in all_rows: aid, name, ident, mbti, gpu, closed, opens, realized, wins = r gpu = gpu or 0; closed = closed or 0; opens = opens or 0; realized = realized or 0; wins = wins or 0 unrealized = 0.0 pos_c = await 
db.execute( "SELECT ticker, direction, entry_price, gpu_bet, COALESCE(leverage,1) FROM npc_positions WHERE agent_id=? AND status='open'", (aid,)) for pos in await pos_c.fetchall(): tk, d, entry, bet, lev = pos cur = prices.get(tk, 0) if entry and entry > 0 and cur > 0: chg = (cur - entry) / entry if d == 'short': chg = -chg unrealized += round(bet * chg * lev, 2) total = round(realized + unrealized, 2) return_pct = round(total / INITIAL_GPU * 100, 2) wr = round(wins / closed * 100, 1) if closed > 0 else 0 fav_c = await db.execute( "SELECT ticker, COUNT(*) as cnt FROM npc_positions WHERE agent_id=? GROUP BY ticker ORDER BY cnt DESC LIMIT 2", (aid,)) favs = [f[0] for f in await fav_c.fetchall()] best_c = await db.execute("SELECT MAX(profit_gpu) FROM npc_positions WHERE agent_id=? AND status IN ('closed','liquidated')", (aid,)) best = (await best_c.fetchone())[0] or 0 worst_c = await db.execute("SELECT MIN(profit_gpu) FROM npc_positions WHERE agent_id=? AND status IN ('closed','liquidated')", (aid,)) worst = (await worst_c.fetchone())[0] or 0 rankings.append({ 'agent_id': aid, 'username': name, 'identity': ident, 'mbti': mbti, 'gpu': round(gpu, 1), 'total_profit': total, 'return_pct': return_pct, 'realized': round(realized, 2), 'unrealized': round(unrealized, 2), 'closed_trades': closed, 'open_trades': opens, 'wins': wins, 'win_rate': wr, 'fav_tickers': favs, 'best_trade': round(best, 1), 'worst_trade': round(worst, 1), }) rankings.sort(key=lambda x: x['return_pct'], reverse=True) top30 = rankings[:limit] top30_ids = [r['agent_id'] for r in top30] name_map = {r['agent_id']: r['username'] for r in top30} # โ”€โ”€ ํƒ€์ž„๋ผ์ธ: npc_positions ์˜ closed_at ๊ธฐ๋ฐ˜ ๋ˆ„์  ์ˆ˜์ต๋ฅ  ์ง์ ‘ ๊ณ„์‚ฐ โ”€โ”€ # ๊ฐ NPC์˜ ์ฒญ์‚ฐ ๊ฑฐ๋ž˜๋ฅผ ์‹œ๊ฐ„์ˆœ ์ •๋ ฌ โ†’ ๋ˆ„์  ์ˆ˜์ต โ†’ ์ˆ˜์ต๋ฅ (%) timeline_raw = {} # {agent_id: [(hour_str, cumulative_return_pct), ...]} for r in top30: aid = r['agent_id'] # ์ฒญ์‚ฐ๋œ ๊ฑฐ๋ž˜๋งŒ (์‹œ๊ฐ„์ˆœ) tc = await db.execute(""" SELECT profit_gpu, 
closed_at FROM npc_positions WHERE agent_id=? AND status IN ('closed','liquidated') AND closed_at IS NOT NULL ORDER BY closed_at ASC """, (aid,)) trades = await tc.fetchall() if not trades: continue cumulative = 0.0 points = [] for profit_gpu, closed_at in trades: cumulative += (profit_gpu or 0) try: ct = datetime.fromisoformat(str(closed_at).replace('Z', '').replace('+00:00', '')) except: continue if ct < cutoff: continue # ๊ธฐ๊ฐ„ ํ•„ํ„ฐ hour_str = ct.strftime('%Y-%m-%dT%H') ret_pct = round(cumulative / INITIAL_GPU * 100, 2) points.append((hour_str, ret_pct)) # ๋ฏธ์‹คํ˜„ ํฌํ•จ ํ˜„์žฌ ์‹œ์  ์ถ”๊ฐ€ current_ret = round((cumulative + r['unrealized']) / INITIAL_GPU * 100, 2) now_hour = now.strftime('%Y-%m-%dT%H') points.append((now_hour, current_ret)) # ๋™์ผ ์‹œ๊ฐ„๋Œ€ ์ค‘๋ณต ์ œ๊ฑฐ (๋งˆ์ง€๋ง‰ ๊ฐ’ ์œ ์ง€) deduped = {} for h, v in points: deduped[h] = v timeline_raw[aid] = deduped # ๋ชจ๋“  ์‹œ๊ฐ„ ์Šฌ๋กฏ ์ˆ˜์ง‘ all_hours = set() for aid, pts in timeline_raw.items(): all_hours.update(pts.keys()) # ๊ธฐ๊ฐ„ ์‹œ์ž‘์ ๋„ ์ถ”๊ฐ€ (0% ์ถœ๋ฐœ์ ) start_hour = cutoff.strftime('%Y-%m-%dT%H') all_hours.add(start_hour) sorted_hours = sorted(all_hours) # ๋‹ค์šด์ƒ˜ํ”Œ๋ง if len(sorted_hours) > 150: step = max(1, len(sorted_hours) // 120) last = sorted_hours[-1] sorted_hours = [h for i, h in enumerate(sorted_hours) if i % step == 0] if sorted_hours[-1] != last: sorted_hours.append(last) # ํƒ€์ž„๋ผ์ธ ์กฐ๋ฆฝ (๊ฐ NPC๋ณ„ forward-fill, O(hours * npcs)) # ๋จผ์ € NPC๋ณ„ forward-fill ๋ฐฐ์—ด ์‚ฌ์ „ ๊ณ„์‚ฐ npc_filled = {} # {aid: [val_for_each_hour]} for r in top30: aid = r['agent_id'] pts = timeline_raw.get(aid, {}) filled = [] last_val = 0.0 # ์‹œ์ž‘ = 0% for hour in sorted_hours: if hour in pts: last_val = pts[hour] filled.append(last_val) npc_filled[aid] = filled timeline = [] for idx, hour in enumerate(sorted_hours): label = hour[5:13].replace('T', ' ') + 'h' point = {'time': label} for r in top30: point[name_map[r['agent_id']]] = npc_filled[r['agent_id']][idx] timeline.append(point) # 
ํŒ”๋ ˆํŠธ palette = [ '#FFD700', '#E0E0E0', '#CD7F32', '#00E5FF', '#FF4081', '#76FF03', '#FF9100', '#E040FB', '#00BFA5', '#FFD740', '#8C9EFF', '#B388FF', '#82B1FF', '#A7FFEB', '#FF8A80', '#EA80FC', '#80D8FF', '#CCFF90', '#FFE57F', '#FF80AB', '#B9F6CA', '#84FFFF', '#CF94DA', '#FFB74D', '#E57373', '#90A4AE', '#A1887F', '#CE93D8', '#EF9A9A', '#BCAAA4', ] npc_lines = [{'key': r['username'], 'color': palette[i % len(palette)], 'rank': i + 1} for i, r in enumerate(top30)] return { 'rankings': top30, 'timeline': timeline, 'npc_lines': npc_lines, 'period': period, 'initial_gpu': INITIAL_GPU, 'snapshot_count': len(sorted_hours), } async def get_npc_trade_history(db_path: str, agent_id: str) -> Dict: """NPC ๊ฐœ๋ณ„ ๊ฑฐ๋ž˜ ํžˆ์Šคํ† ๋ฆฌ ์ƒ์„ธ""" async with aiosqlite.connect(db_path, timeout=30.0) as db: await db.execute("PRAGMA busy_timeout=30000") # NPC ๊ธฐ๋ณธ ์ •๋ณด nc = await db.execute("SELECT username, ai_identity, mbti, gpu_dollars FROM npc_agents WHERE agent_id=?", (agent_id,)) npc = await nc.fetchone() if not npc: return {'error': 'NPC not found'} # ํ˜„์žฌ ์‹œ์„ธ pc = await db.execute("SELECT ticker, price FROM market_prices WHERE price > 0") prices = {r[0]: r[1] for r in await pc.fetchall()} # ์ „์ฒด ํฌ์ง€์…˜ (์ตœ์‹ ์ˆœ) tc = await db.execute(""" SELECT id, ticker, direction, entry_price, exit_price, gpu_bet, COALESCE(leverage,1), status, profit_gpu, profit_pct, liquidated, opened_at, closed_at, reasoning FROM npc_positions WHERE agent_id=? 
ORDER BY id DESC LIMIT 50 """, (agent_id,)) trades = [] for t in await tc.fetchall(): pid, tk, direction, entry, exit_p, bet, lev, status, pnl, pnl_pct, liq, opened, closed, reason = t # ์˜คํ”ˆ ํฌ์ง€์…˜ ๋ฏธ์‹คํ˜„ ๊ณ„์‚ฐ if status == 'open': cur = prices.get(tk, 0) if entry and entry > 0 and cur > 0: chg = (cur - entry) / entry if direction == 'short': chg = -chg pnl = round(bet * chg * lev, 2) pnl_pct = round(chg * lev * 100, 2) trades.append({ 'id': pid, 'ticker': tk, 'direction': direction, 'entry': round(entry, 4) if entry else 0, 'exit': round(exit_p, 4) if exit_p else None, 'bet': round(bet, 1), 'leverage': lev, 'status': status, 'pnl': round(pnl or 0, 2), 'pnl_pct': round(pnl_pct or 0, 2), 'liquidated': bool(liq), 'opened_at': str(opened) if opened else None, 'closed_at': str(closed) if closed else None, 'reasoning': reason, }) return { 'username': npc[0], 'identity': npc[1], 'mbti': npc[2], 'gpu': round(npc[3] or 0, 1), 'trades': trades, 'total_trades': len(trades), }