# server.py — 🚀 COUNCIL DEPLOY: Emergency Direct Uplink (commit 2709e7e)
import os
import sys
import traceback
import re
import shutil
import subprocess
import datetime
import json
import time
import asyncio
import threading
import requests  # previously missing dependency
# Record the server start time
SERVER_START_TIME = time.time()
# [DEBUG] Print as soon as this file starts executing
try:
sys.stderr.reconfigure(encoding='utf-8')
sys.stdout.reconfigure(encoding='utf-8')
except Exception:
    pass
print("DEBUG: server.py is now running.", file=sys.stderr, flush=True)
# Safety guard so the console window does not close immediately on an error
try:
def log(msg):
timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
print(f"[{timestamp}] {msg}", file=sys.stderr, flush=True)
try:
with open("server_log.txt", "a", encoding="utf-8") as f:
f.write(f"[{timestamp}] {msg}\n")
        except Exception:
            pass
def auto_save_safety_backup():
"""ํ•ต์‹ฌ ํŒŒ์ผ(๋Œ€ํ™” ๊ธฐ๋ก)์„ ์•ˆ์ „ ๊ตฌ์—ญ์— ์‹ค์‹œ๊ฐ„์œผ๋กœ ๊ฐœ๋ณ„ ๋ณด๊ด€ํ•ฉ๋‹ˆ๋‹ค."""
try:
# Cross-platform safety backup path
safety_root = os.getenv("SAFETY_BACKUP_DIR", os.path.join(os.path.expanduser("~"), "Gemini_Safety_Backup"))
            os.makedirs(safety_root, exist_ok=True)
ts = datetime.datetime.now().strftime("%Y%m%d_%H%M")
src = "chat_history.json"
if os.path.exists(os.path.join("..", src)): src = os.path.join("..", src)
if os.path.exists(src):
dst = os.path.join(safety_root, f"chat_history_{ts}.json")
shutil.copy2(src, dst)
src_env = ".env"
if os.path.exists(os.path.join("..", ".env")): src_env = os.path.join("..", ".env")
if os.path.exists(src_env):
shutil.copy2(src_env, os.path.join(safety_root, f".env_{ts}"))
return True
        except Exception: pass
return False
def log_shared_chat(role, content, model_type="unknown"):
"""๋ชจ๋“  IDE์—์„œ ๊ณต์œ ๋˜๋Š” ํ†ตํ•ฉ ์ฑ— ๋กœ๊ทธ๋ฅผ ๊ธฐ๋กํ•ฉ๋‹ˆ๋‹ค."""
try:
# Use relative path or env var for shared chat history
actual_path = os.getenv("SHARED_CHAT_PATH", "shared_chat_history.json")
history = []
if os.path.exists(actual_path):
with open(actual_path, "r", encoding="utf-8") as f:
try:
history = json.load(f)
                except Exception:
history = []
entry = {
"timestamp": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
"ide": os.getenv("IDE_NAME", "Gemini_Master_Node"),
"model": model_type,
"role": role,
"content": content[:1500] # ์ตœ์ ํ™”: 1500์ž (๊ฐ€๋…์„ฑ๊ณผ ์†๋„์˜ ํ™ฉ๊ธˆ๋น„์œจ)
}
history.append(entry)
if len(history) > 100: history = history[-100:]
with open(actual_path, "w", encoding="utf-8") as f:
json.dump(history, f, ensure_ascii=False, indent=2)
except Exception as e:
log(f"โš ๏ธ ํ†ตํ•ฉ ๋กœ๊ทธ ๊ธฐ๋ก ์‹คํŒจ: {e}")
log(f"๐Ÿ“‚ ํ˜„์žฌ ์œ„์น˜: {os.getcwd()}")
log("๐Ÿงฉ [Expert] SWE-bench ๋ฐ ์ž์œจ ์—์ด์ „ํŠธ ๋„๊ตฌ ์ง€์› ๋ชจ๋“œ ํ™œ์„ฑํ™”")
# [Cloud Key Loader]
    # Load every cloud key from GLOBAL_CONFIG.env and .env to enable OpenRouter/LiteLLM/Render/Colab.
from dotenv import load_dotenv
def discover_and_load_envs():
"""ํ”„๋กœ์ ํŠธ ๋ฃจํŠธ ๋ฐ ์ƒ์œ„ ํด๋”์—์„œ .env ํŒŒ์ผ๋“ค์„ ์ฐพ์•„ ๋กœ๋“œํ•ฉ๋‹ˆ๋‹ค."""
current = os.path.dirname(os.path.abspath(__file__))
loaded = []
        # Search up to four directory levels upward
for _ in range(4):
for env_name in [".env", "GLOBAL_CONFIG.env"]:
p = os.path.join(current, env_name)
if os.path.exists(p):
load_dotenv(p, override=True)
loaded.append(p)
current = os.path.dirname(current)
return loaded
envs = discover_and_load_envs()
log(f"โœ… ์„ค์ • ํŒŒ์ผ ๋กœ๋“œ ์™„๋ฃŒ: {len(envs)}๊ฐœ ํŒŒ์ผ ๋ฐœ๊ฒฌ")
# API Keys Check & Status Report
REQUIRED_KEYS = ["OPENROUTER_API_KEY", "RENDER_API_KEY", "SUPABASE_KEY", "HUGGINGFACE_TOKEN"]
active_keys = [k for k in REQUIRED_KEYS if os.getenv(k)]
log(f"๐Ÿ”‘ [SYSTEM] Active Cloud Keys: {len(active_keys)}/{len(REQUIRED_KEYS)} Loaded.")
# LiteLLM Config & Fallback Strategy
os.environ["OPENROUTER_API_KEY"] = os.getenv("OPENROUTER_API_KEY", "")
os.environ["HUGGINGFACE_API_KEY"] = os.getenv("HUGGINGFACE_TOKEN", "") # LiteLLM ํ˜ธํ™˜์šฉ
os.environ["GROQ_API_KEY"] = os.getenv("GROQ_API_KEY", "")
# [STRATEGY] Multi-Layer Defense Grid (Main -> Backup -> Last Resort)
# 1. Main: Groq (Speed)
# 2. Backup 1: OpenRouter (Variety)
# 3. Backup 2: HuggingFace Inference API (Free Tier)
# 4. Last Resort: Local Ollama (Privacy/Offline)
SYSTEM_MODEL_MAP = {
"fast": ["groq/llama3-8b-8192", "openrouter/meta-llama/llama-3-8b-instruct", "ollama/llama3"],
"smart": ["groq/llama3-70b-8192", "openrouter/meta-llama/llama-3-70b-instruct", "ollama/qwen:14b"],
"coding": ["groq/gemma2-9b-it", "openrouter/google/gemma-2-9b-it", "ollama/codegemma"]
}
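
    # [Illustration] SYSTEM_MODEL_MAP is not consumed anywhere below. A minimal
    # sketch (an assumption, not the deployed logic) of how the defense grid
    # could be walked with litellm's completion(), imported just below:
    def ask_with_fallback(tier: str, prompt: str) -> str:
        """Try each model in a tier's fallback chain until one answers."""
        for candidate in SYSTEM_MODEL_MAP.get(tier, []):
            try:
                r = completion(model=candidate,
                               messages=[{"role": "user", "content": prompt}],
                               timeout=30, max_tokens=1000)
                return r.choices[0].message.content
            except Exception:
                continue  # fall through to the next layer of the grid
        return "❌ All layers of the defense grid failed."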
log("๐Ÿ”„ [1๋‹จ๊ณ„] ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ ๋กœ๋”ฉ ์ค‘...")
# from dotenv import load_dotenv # Moved to Cloud Key Loader
from fastmcp import FastMCP
try:
import litellm
from litellm import completion
    except ImportError:
        log("⚠️ litellm missing, installing...")
        subprocess.check_call([sys.executable, "-m", "pip", "install", "litellm"])
        import litellm
        from litellm import completion
log("๐Ÿ”„ [๋‹จ๊ณ„] DuckDuckGo & Tavily ๋กœ๋”ฉ ์ค‘...")
try:
from duckduckgo_search import DDGS
except ImportError:
DDGS = None
try:
from tavily import TavilyClient
except ImportError:
TavilyClient = None
from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
import uvicorn
# [NEW] Learning Engine Import
import learning_engine
# [NEW] Rules Manager
    RULES_DIR = os.path.join(os.path.dirname(__file__), "Rules")
    # Ensure every rule subdirectory exists, even if Rules/ itself already does
    for sub in ("Trading", "Indicators", "Patterns"):
        os.makedirs(os.path.join(RULES_DIR, sub), exist_ok=True)
# [NEW] SYSTEM BLACKBOX INIT
try:
project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
        sys.path.append(project_root)  # add the project root to the import path
import system_blackbox
log("๐Ÿ“ผ [BLACKBOX] Flight Recorder Armed & Recording...")
except ImportError:
log("โš ๏ธ [BLACKBOX] Recorder not found, flying without blackbox.")
except Exception as e:
print(f"\nโŒ [์˜ค๋ฅ˜] ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ ๋กœ๋”ฉ ์ค‘ ์น˜๋ช…์  ์˜ค๋ฅ˜: {e}")
traceback.print_exc()
sys.exit(1)
# FastAPI server instance
app = FastAPI(title="Gemini Master Hub API")
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_methods=["*"],
allow_headers=["*"],
)
# MCP server instance
mcp = FastMCP("Gemini Server")
try:
    # Environment files were already loaded above by discover_and_load_envs()
    log("✅ Environment configuration optimized")
    # Collect API keys (from environment variables and the loaded .env files)
    api_inventory = {"google": [], "xai": [], "openai": [], "perplexity": [], "groq": [], "anthropic": [], "openrouter": []}
    # Re-scan .env files in the current directory: the regex scan below needs the
    # raw file contents, not just the environment variables loaded earlier
    loaded_envs_files = [os.path.join(os.getcwd(), f) for f in [".env", "GLOBAL_CONFIG.env"] if os.path.exists(f)]
for e_path in loaded_envs_files:
try:
with open(e_path, "r", encoding="utf-8") as f: content = f.read()
        except Exception:
continue
for k in api_inventory.keys():
if k == "google":
api_inventory[k] += re.findall(r'AIza[0-9A-Za-z\-_]{30,}', content)
elif k == "openrouter":
api_inventory[k] += re.findall(r'OPENROUTER_API_KEY=(sk-or-v1-[0-9A-Za-z\-_]+)', content)
else:
prefix = {"xai": "xai-", "openai": "sk-", "groq": "gsk_"}.get(k, "")
if prefix:
api_inventory[k] += re.findall(rf'{k.upper()}_API_KEY=({prefix}[0-9A-Za-z\-_]+)', content)
google_keys = list(set(api_inventory["google"]))
xai_keys = list(set(api_inventory["xai"]))
log(f"โœ… AI ์ž์› ํ™•๋ณด: Google({len(google_keys)}), Grok({len(xai_keys)})")
# --- FastAPI ์—”๋“œํฌ์ธํŠธ ---
@app.post("/api/generate_rules_by_keyword")
async def api_generate_rules(request: Request):
try:
data = await request.json()
kw = data.get("keyword", "AUTO")
model = data.get("model", "gemini-2.0-flash")
            category = data.get("category", "Trading")  # Trading, Indicators, or Patterns
            log(f"🏭 Rule generation request: {kw} (Model: {model}, Category: {category})")
            # The actual AI call
            prompt = f"Generate professional trading rules for '{kw}' in JSON format. Include: entry conditions (entry_conditions), exit conditions (exit_conditions), stop loss (stop_loss), take profit (take_profit), and risk factors (risk_factors)."
result = ask_any_model(prompt, model)
            # Try to parse the result as JSON
            try:
                # Strip markdown code fences
                clean_json = re.sub(r'```json\s*|\s*```', '', result).strip()
                rule_data = json.loads(clean_json)
                # Save to file
                filename = f"{kw.replace(' ', '_')}_Rules.json"
                filepath = os.path.join(RULES_DIR, category, filename)
                with open(filepath, "w", encoding="utf-8") as f:
                    json.dump(rule_data, f, indent=2, ensure_ascii=False)
                log(f"✅ Rule file created: {filepath}")
                return {"status": "success", "message": f"Rule generated: {filename}", "data": rule_data}
            except Exception as e:
                log(f"⚠️ JSON conversion failed, saving as text: {e}")
                # Fall back to saving the raw text
                filepath = os.path.join(RULES_DIR, category, f"{kw.replace(' ', '_')}_Raw.md")
                with open(filepath, "w", encoding="utf-8") as f:
                    f.write(result)
                return {"status": "partial_success", "message": "JSON parsing failed; saved as raw text", "data": result}
except Exception as e:
return {"status": "error", "message": str(e)}
@app.get("/api/get_file_content")
async def api_get_file(path: str):
"""ํŒŒ์ผ ๋‚ด์šฉ ์ฝ๊ธฐ (๋ณด์•ˆ์„ ์œ„ํ•ด Gemini_Project ๋‚ด๋ถ€ ๊ฒฝ๋กœ๋งŒ ํ—ˆ์šฉ)"""
try:
# Root relative path normalization
project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
# If path is direct absolute path, keep it if it's within root
if ":" not in path:
full_path = os.path.join(project_root, path)
else:
full_path = path
if os.path.exists(full_path):
with open(full_path, "r", encoding="utf-8") as f:
return {"status": "success", "content": f.read()}
return {"status": "error", "message": f"File not found: {path}"}
except Exception as e:
return {"status": "error", "message": str(e)}
@app.post("/api/save_custom_file")
async def api_save_file(request: Request):
try:
data = await request.json()
path = data.get("path")
content = data.get("content")
            if not os.path.isabs(path):
                project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
                full_path = os.path.join(project_root, path)
            else:
                full_path = path
os.makedirs(os.path.dirname(full_path), exist_ok=True)
with open(full_path, "w", encoding="utf-8") as f:
f.write(content)
return {"status": "success", "message": f"Saved to {path}"}
except Exception as e:
return {"status": "error", "message": str(e)}
@app.post("/api/execute_command")
async def api_run_cmd(request: Request):
"""์ปค๋งจ๋“œ ์‹คํ–‰ (VPN ์‹œ์ž‘, ์‹œ์Šคํ…œ ์žฌ์‹œ์ž‘ ๋“ฑ)"""
try:
data = await request.json()
cmd = data.get("command")
cwd = data.get("cwd", ".")
project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
full_cwd = os.path.join(project_root, cwd)
            # Use subprocess.Popen so the server is not blocked (Windows-only: opens a new cmd window)
            subprocess.Popen(f"start cmd /k {cmd}", cwd=full_cwd, shell=True)
return {"status": "success", "message": f"Command '{cmd}' executed in {cwd}"}
except Exception as e:
return {"status": "error", "message": str(e)}
# [NEW] Learning Engine APIs
@app.post("/api/trade/record")
async def api_record_trade(request: Request):
"""๋งค๋งค ์ง„์ž… ๊ธฐ๋ก"""
try:
data = await request.json()
res = learning_engine.record_trade(
data.get("symbol"),
data.get("action"),
data.get("reason"),
data.get("price", "Market")
)
return {"status": "success", "message": res}
except Exception as e:
return {"status": "error", "message": str(e)}
@app.post("/api/trade/feedback")
async def api_trade_feedback(request: Request):
"""๋งค๋งค ๊ฒฐ๊ณผ ๋ฐ ํ”ผ๋“œ๋ฐฑ ์ €์žฅ"""
try:
data = await request.json()
res = learning_engine.update_trade_result(
data.get("symbol"),
data.get("result"), # WIN / LOSS
data.get("feedback")
)
return {"status": "success", "message": res}
except Exception as e:
return {"status": "error", "message": str(e)}
@app.get("/api/trade/lessons")
async def api_get_lessons(symbol: str = "ALL"):
"""๋งค๋งค ๊ตํ›ˆ ์กฐํšŒ"""
try:
res = learning_engine.get_trading_lessons(symbol)
return {"status": "success", "data": res}
except Exception as e:
return {"status": "error", "message": str(e)}
@app.get("/api/stats")
async def get_brain_stats():
"""ํŠธ๋ ˆ์ด๋”ฉ ๋ธŒ๋ ˆ์ธ์˜ ํ˜„์žฌ ์‹ค์‹œ๊ฐ„ ์ƒํƒœ(IQ, ๊ทœ์น™ ์ˆ˜) ๋ฐ˜ํ™˜"""
try:
# 1. ๊ทœ์น™ ์ˆ˜ ๊ณ„์‚ฐ
rule_dir = "04_SYNC_DATA/LEARNED_RULES"
rules = [f for f in os.listdir(rule_dir) if f.endswith('.json')] if os.path.exists(rule_dir) else []
# 2. ํƒ€๊ฒŸ ์ž์‚ฐ ํ™•์ธ
target_file = "04_SYNC_DATA/TARGET_ASSETS.json"
targets = []
if os.path.exists(target_file):
with open(target_file, "r") as f:
targets = json.load(f)
return {
"status": "online",
"brain_iq": len(rules) * 15 + 100, # ๋‹จ์ˆœ ์‹œ๊ฐํ™”์šฉ IQ ๊ณ„์‚ฐ
"total_rules": len(rules),
"active_targets": [t.get('symbol') for t in targets],
"last_update": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
}
except Exception as e:
return {"status": "error", "message": str(e)}
@app.post("/api/chat")
async def api_chat(request: Request):
try:
data = await request.json()
msg = data.get("message", "")
model = data.get("model", "gemini-2.0-flash")
mode = data.get("mode", "chat") # chat, trade, search, app
if not msg: return {"status": "error", "message": "No message provided"}
# Mode-specific logic
system_prompt = "You are Gemini Master, the supreme orchestrator of all integrated platforms (Firebase, Vercel, Supabase, Notion, etc.)."
if mode == "trade":
system_prompt = "You are the Trading Sentinel. Accessing live market data and Supabase logs."
                # Auto-inject real-time price data (example: BTC)
prices = get_crypto_price("BTC")
wisdom = learning_engine.get_trading_lessons("ALL")
msg = f"[Live Market]: {prices}\n[Context: Trading Wisdom]\n{wisdom}\n\n[User Request]: {msg}"
elif mode == "search":
system_prompt = "You are the Quantum Search engine. Utilizing DuckDuckGo, Tavily, and Google Search."
search_results = web_search_ddg(msg)
msg = f"[Web Search Results]:\n{search_results}\n\n[User Request]: {msg}"
elif mode == "app":
app_id = data.get("app_id", "general")
app_prompts = {
"coder": "You are a Senior Software Engineer specializing in GitHub Actions, Codespaces, and Cursor SSH.",
"analyst": "You are a Market Analyst. Use Supabase data and exchange APIs for real-time insights.",
"notion": "You are a Knowledge Manager. Help organize thoughts into Notion databases and rules.",
"cloud_master": "You are the Cloud Architect. Manage Vercel, Render, and Firebase deployments.",
"trader_alpha": "You are an Elite Algorithmic Trader. Specialized in high-frequency trading rules."
}
# Context enhancement for Analyst
if app_id == "analyst":
news = get_market_news("global market")
# Load Auto-Generated Rules
try:
import glob
rule_files = glob.glob("04_SYNC_DATA/LEARNED_RULES/*.json")
loaded_rules = []
for rf in rule_files[-3:]: # Load latest 3 rules
with open(rf, "r", encoding="utf-8") as f:
loaded_rules.append(json.load(f))
rules_context = json.dumps(loaded_rules, ensure_ascii=False, indent=1)
                    except Exception:
                        rules_context = "No rules yet."
                    msg = f"[Context: Real-time News]\n{news}\n\n[Context: Auto-Generated Rules]\n{rules_context}\n\n[User Request]: {msg}"
                system_prompt = app_prompts.get(app_id, "You are a specialized AI assistant.")
            # Multi-Brain Switching Logic (LiteLLM). Prepend the mode's system prompt,
            # since ask_any_model takes no separate system-prompt argument.
            response = ask_any_model(f"[System Instruction]: {system_prompt}\n\n{msg}", model)
return {"status": "success", "response": response, "mode": mode}
except Exception as e:
return {"status": "error", "message": str(e)}
@app.get("/api/apps/list")
async def api_list_apps():
"""์ง€์›๋˜๋Š” AI ์•ฑ/๋ชจ๋“œ ๋ฆฌ์ŠคํŠธ ๋ฐ˜ํ™˜"""
return {
"status": "success",
"apps": [
{"id": "chat", "name": "Standard Chat", "icon": "๐Ÿ’ฌ", "desc": "Universal Intelligence"},
{"id": "trade", "name": "Trading Sentinel", "icon": "๐Ÿ“ˆ", "desc": "Market & Exchange Hub"},
{"id": "search", "name": "Quantum Search", "icon": "๐Ÿ”", "desc": "Tavily + DDG Global Search"},
{"id": "app", "name": "App: Cloud Architect", "icon": "โ˜๏ธ", "app_id": "cloud_master"},
{"id": "app", "name": "App: Code Expert (SSH)", "icon": "๐Ÿ’ป", "app_id": "coder"},
{"id": "app", "name": "App: Notion Agent", "icon": "๐Ÿ““", "app_id": "notion"},
{"id": "app", "name": "App: Market Analyst", "icon": "๐Ÿ“Š", "app_id": "analyst"},
{"id": "app", "name": "App: Elite Trader", "icon": "๐Ÿฆ…", "app_id": "trader_alpha"}
]
}
@app.get("/api/debate/suggest")
async def api_debate_suggest():
"""๋”ฅ๋งˆ์ธ๋“œ ๋ธŒ๋ ˆ์ธ์ด ํ˜„์žฌ ํ”„๋กœ์ ํŠธ/์‹œ์žฅ ์ƒํ™ฉ์— ๋งž๋Š” ํ† ๋ก  ์ฃผ์ œ๋ฅผ ์ œ์•ˆ"""
prompt = "ํ˜„์žฌ ์ธ๊ณต์ง€๋Šฅ ํŠธ๋ ˆ์ด๋”ฉ ์‹œ์Šคํ…œ์„ ๊ตฌ์ถ• ์ค‘์ด์•ผ. ์šฐ๋ฆฌ '๋”ฅ๋งˆ์ธ๋“œ ์œ„์›ํšŒ(The Council)'๊ฐ€ ํ† ๋ก ํ•  ๋งŒํ•œ ์•„์ฃผ ๋‚ ์นด๋กญ๊ณ  ํ˜์‹ ์ ์ธ ์ฃผ์ œ 3๊ฐ€์ง€๋งŒ ์ œ์•ˆํ•ด์ค˜. (์˜ˆ: ๊ธฐ์ˆ ์  ๋ถ„์„ vs ์˜จ์ฒด์ธ ๋ฐ์ดํ„ฐ, ํ€€ํŠธ ์ „๋žต์˜ ํ•œ๊ณ„ ๋“ฑ)"
try:
# ๋น ๋ฅธ ๋‹ต๋ณ€์„ ์œ„ํ•ด gemini-2.0-flash ์‚ฌ์šฉ
suggestion = ask_any_model(prompt, "gemini-2.0-flash")
return {"status": "success", "suggestions": suggestion}
        except Exception:
return {"status": "error", "message": "Failed to generate suggestions"}
@app.get("/api/insights/latest")
async def api_get_insights(refresh: bool = False):
"""์ž์œจ๋ถ„์„ ์—์ด์ „ํŠธ์˜ ์ตœ์‹  ์ธ์‚ฌ์ดํŠธ ๋ฐ˜ํ™˜"""
from .autonomous_analyst import generate_autonomous_insight, INSIGHTS_FILE
if refresh or not os.path.exists(INSIGHTS_FILE):
insight = generate_autonomous_insight()
if not insight: return {"status": "error", "message": "Analysis failed"}
return {"status": "success", "data": insight}
try:
with open(INSIGHTS_FILE, "r", encoding="utf-8") as f:
return {"status": "success", "data": json.load(f)}
        except Exception:
return {"status": "error", "message": "Read error"}
    # --- MCP tools (unified intelligence) ---
@mcp.tool()
def get_shared_history(limit: int = 15) -> str:
"""[ํ†ตํ•ฉ ๊ธฐ์–ต์žฅ์น˜] ๋‹ค๋ฅธ IDE๋‚˜ ๋‹ค๋ฅธ ์‹œ๊ฐ„๋Œ€์— ๋‚˜๋ˆˆ ๋ชจ๋“  ์ฑ„ํŒ… ๊ธฐ๋ก์„ ๋ถˆ๋Ÿฌ์˜ต๋‹ˆ๋‹ค. 'ํ•˜๋‚˜์˜ ๋‡Œ'์ฒ˜๋Ÿผ ์ž‘๋™ํ•˜๊ฒŒ ํ•ด์ค๋‹ˆ๋‹ค."""
log_path = "shared_chat_history.json"
if not os.path.exists(log_path) and os.path.exists(os.path.join("..", log_path)): log_path = os.path.join("..", log_path)
if not os.path.exists(log_path): return "๊ธฐ๋ก์ด ์•„์ง ์—†์Šต๋‹ˆ๋‹ค. ๋Œ€ํ™”๋ฅผ ์‹œ์ž‘ํ•˜๋ฉด ์ž๋™์œผ๋กœ ์ด๊ณณ์— ๋ชจ์ž…๋‹ˆ๋‹ค."
try:
with open(log_path, "r", encoding="utf-8") as f:
history = json.load(f)
recent = history[-limit:]
return "\n".join([f"[{e['timestamp']}] {e['role']} ({e['ide']}): {e['content']}" for e in recent])
        except Exception: return "Failed to read the log."
@mcp.tool()
def list_supported_models() -> str:
"""[๐Ÿ“‹๋ชจ๋ธ ๋ฆฌ์ŠคํŠธ] ํ˜„์žฌ ์‹œ์Šคํ…œ์—์„œ ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ ๋ชจ๋“  AI ๋ชจ๋ธ ๋ชฉ๋ก์„ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค."""
model_list = [
"--- Google Gemini ---",
"gemini-2.0-flash, gemini-2.0-pro, gemini-1.5-pro, gemini-2.5-alpha",
"",
"--- Anthropic Claude ---",
"claude-3.5-sonnet, claude-3.5-haiku, claude-3-opus",
"",
"--- OpenAI GPT ---",
"gpt-4o, gpt-4o-mini, o1-preview, o1-mini",
"",
"--- Perplexity (Search) ---",
"perplexity-sonar, perplexity-reasoning",
"",
"--- Groq (Llama, Fast) ---",
"llama-3.3-70b, llama-3.1-70b, mixtral-8x7b",
"",
"--- Hugging Face (Serverless) ---",
"hf/meta-llama/Llama-2-7b-hf, hf/mistralai/Mistral-7B-v0.1",
"",
"--- Other ---",
"grok, deepseek-v3"
]
return "\n".join(model_list)
@mcp.tool()
def ask_any_model(question: str, model_type: str = "gemini-2.0-flash") -> str:
"""[๐Ÿง ์˜ฌ์ธ์› ๋ธŒ๋ ˆ์ธ] ์ „ ์„ธ๊ณ„ ๋ชจ๋“  AI ๋ชจ๋ธ์„ ํ˜ธ์ถœํ•ฉ๋‹ˆ๋‹ค.
์ง€์›: gemini(2.0, 1.5), claude(3.5, Opus), gpt(4o, o1), grok, perplexity(sonar), llama(groq)"""
log_shared_chat("user", question, model_type)
# [REFINED] Authorized Model Pool
# Optimized for OpenRouter fallback to ensure high availability
model_map = {
# --- 5 IDE-Tier Models ---
"gemini-2.0-flash": "gemini/gemini-2.0-flash",
"gemini-2.0-pro": "gemini/gemini-2.0-pro-exp",
"gpt-4o": "openrouter/openai/gpt-4o",
"claude-3.5-sonnet": "openrouter/anthropic/claude-3.5-sonnet",
"gpt-4-turbo": "openrouter/openai/gpt-4-turbo",
            # --- Specialized Claude Models ---
"claude-3.5-haiku": "openrouter/anthropic/claude-3.5-haiku",
"claude-3-opus": "openrouter/anthropic/claude-3-opus",
# --- Grok & Others ---
"grok": "openrouter/x-ai/grok-2",
# --- Perplexity (Search AI) ---
"perplexity-sonar": "openrouter/perplexity/sonar",
# --- Groq (Fast Llama) ---
"llama-3.3-70b": "groq/llama-3.3-70b-versatile"
}
model_id = model_map.get(model_type, model_type)
provider = model_id.split('/')[0] if '/' in model_id else "unknown"
# 1. Inject Master Context (Real-time Learning from Files)
context_prompt = ""
try:
project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
rules_path = os.path.join(project_root, "04_SYNC_DATA", "Master_Rules.json")
if os.path.exists(rules_path):
with open(rules_path, "r", encoding="utf-8") as f:
rules = json.load(f)
context_prompt += f"\n[Master Rules]: {json.dumps(rules, ensure_ascii=False)}"
        except Exception: pass
full_question = f"{context_prompt}\n\nCurrent User Request: {question}"
        # Per-platform API key selection with automatic OpenRouter fallback
        keys = api_inventory.get(provider, [])
        # OpenRouter-specific handling
if provider == "openrouter" or (not keys and provider != "gemini"):
or_key = os.getenv("OPENROUTER_API_KEY")
if or_key:
keys = [or_key]
if not model_id.startswith("openrouter/"):
# Fallback mapping for OpenRouter
or_map = {
"anthropic": "openrouter/anthropic/",
"openai": "openrouter/openai/",
"xai": "openrouter/x-ai/",
"google": "openrouter/google/"
}
if provider in or_map:
model_id = model_id.replace(f"{provider}/", or_map[provider])
else:
model_id = f"openrouter/{model_id}"
if not keys and provider == "gemini":
keys = google_keys
        # Handle explicitly requested OpenRouter models ("or/..." prefix)
        if model_type.startswith("or/"):
            model_id = "openrouter/" + model_type[3:]
            keys = api_inventory.get("openrouter") or [os.getenv("OPENROUTER_API_KEY")]
        # Attempt the call (cap max_tokens to protect the quota)
for k in (keys if keys else [""]):
try:
r = completion(
model=model_id,
messages=[
{"role": "system", "content": "You are Gemini Master, an autonomous AI system. You can save files by starting your response with 'SAVE_FILE:[path]' followed by the content. Example: 'SAVE_FILE:Rules/new_rule.json\n{...}'"},
{"role": "user", "content": full_question}
],
api_key=k if k else None,
timeout=30,
                    max_tokens=1000  # cap at 1,000 tokens: protects quota without truncating technical answers
)
ans = r.choices[0].message.content
# [Smart Agent Interaction]
if ans.startswith("SAVE_FILE:"):
try:
file_info = ans.split("\n", 1)
header = file_info[0]
content = file_info[1] if len(file_info) > 1 else ""
target_path = header.replace("SAVE_FILE:", "").strip()
# Project Root normalization
project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
full_save_path = os.path.join(project_root, target_path)
os.makedirs(os.path.dirname(full_save_path), exist_ok=True)
with open(full_save_path, "w", encoding="utf-8") as f:
f.write(content)
ans = f"โœ… [AGENT] File saved to: {target_path}\n\n" + ans
except Exception as fe:
ans = f"โš ๏ธ [AGENT] File save failed: {fe}\n\n" + ans
log_shared_chat("assistant", ans, model_type)
auto_save_safety_backup()
return ans
            except Exception as e:
                log(f"⚠️ {model_id} failed: {str(e)[:50]}")
                continue
        return f"❌ {model_type} call failed. Check your API keys or try another model."
@mcp.tool()
def web_search_ddg(query: str) -> str:
"""[๐Ÿ”DuckDuckGo ๊ฒ€์ƒ‰] ์‹ค์‹œ๊ฐ„ ์›น ์ •๋ณด๋ฅผ ๊ฒ€์ƒ‰ํ•ฉ๋‹ˆ๋‹ค."""
try:
with DDGS() as ddgs:
results = [r for r in ddgs.text(query, max_results=5)]
if not results: return "๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค."
return "\n\n".join([f"[{r['title']}]\n{r['body']}\n({r['href']})" for r in results])
except Exception as e: return f"๊ฒ€์ƒ‰ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {e}"
@mcp.tool()
def web_search_tavily(query: str) -> str:
"""[๐Ÿง Tavily AI ๊ฒ€์ƒ‰] AI ์ตœ์ ํ™” ์›น ๊ฒ€์ƒ‰์„ ์ˆ˜ํ–‰ํ•ฉ๋‹ˆ๋‹ค (API ํ‚ค ํ•„์š”)."""
tav_key = os.getenv("TAVILY_API_KEY")
if not tav_key: return "TAVILY_API_KEY๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค."
try:
client = TavilyClient(api_key=tav_key)
result = client.search(query, search_depth="advanced")
if not result.get('results'): return "๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค."
return "\n\n".join([f"[{r['title']}]\n{r['content']}\n({r['url']})" for r in result['results'][:5]])
except Exception as e: return f"Tavily ๊ฒ€์ƒ‰ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {e}"
@mcp.tool()
def read_file(path: str) -> str:
"""ํŒŒ์ผ ๋‚ด์šฉ์„ ์ฝ์Šต๋‹ˆ๋‹ค."""
try:
with open(path, "r", encoding="utf-8") as f: return f.read()
except Exception as e: return str(e)
@mcp.tool()
def write_file(path: str, content: str) -> str:
"""ํŒŒ์ผ์„ ์ €์žฅํ•ฉ๋‹ˆ๋‹ค."""
try:
with open(path, "w", encoding="utf-8") as f: f.write(content); return f"โœ… ์ €์žฅ ์™„๋ฃŒ: {path}"
except Exception as e: return str(e)
# --- [NEW] AI Self-Learning Tools (RAG Feedback Loop) ---
@mcp.tool()
def record_new_trade(symbol: str, action: str, reason: str) -> str:
"""[ํ•™์Šต:๊ธฐ๋ก] ์ƒˆ๋กœ์šด ๋งค๋งค ์ง„์ž…์„ ๊ธฐ๋กํ•ฉ๋‹ˆ๋‹ค. ์˜ˆ: record_new_trade('BTC', 'BUY', 'RSI 30 touched')"""
return learning_engine.record_trade(symbol, action, reason)
@mcp.tool()
def update_trade_outcome(symbol: str, result: str, feedback: str) -> str:
"""[ํ•™์Šต:ํ”ผ๋“œ๋ฐฑ] ๋งค๋งค ๊ฒฐ๊ณผ๋ฅผ ํ‰๊ฐ€ํ•ฉ๋‹ˆ๋‹ค. ์˜ˆ: update_trade_outcome('BTC', 'WIN', 'Good RSI signal')"""
return learning_engine.update_trade_result(symbol, result, feedback)
@mcp.tool()
def get_market_wisdom(symbol: str = "ALL") -> str:
"""[ํ•™์Šต:ํšŒ์ƒ] ๊ณผ๊ฑฐ ๋งค๋งค ๊ธฐ๋ก์—์„œ ๊ตํ›ˆ์„ ์–ป์Šต๋‹ˆ๋‹ค."""
return learning_engine.get_trading_lessons(symbol)
@mcp.tool()
def get_crypto_price(symbol: str = "BTC") -> str:
"""[๐Ÿ“ˆ์‹ค์‹œ๊ฐ„ ์‹œ์„ธ] ์—…๋น„ํŠธ(KRW) ๋ฐ ๋ฐ”์ด๋‚ธ์Šค(USDT) ์‹ค์‹œ๊ฐ„ ๊ฐ€๊ฒฉ์„ ์กฐํšŒํ•ฉ๋‹ˆ๋‹ค. (API ํ‚ค ๋ถˆํ•„์š”)"""
results = []
try:
# 1. Upbit (Public API)
upbit_symbol = f"KRW-{symbol}"
r_up = requests.get(f"https://api.upbit.com/v1/ticker?markets={upbit_symbol}", timeout=5)
if r_up.status_code == 200:
data = r_up.json()[0]
results.append(f"Upbit: {data['trade_price']:,} KRW ({data['signed_change_rate']*100:+.2f}%)")
        except Exception: pass
try:
# 2. Binance (Public API)
bin_symbol = f"{symbol}USDT"
r_bin = requests.get(f"https://api.binance.com/api/v3/ticker/price?symbol={bin_symbol}", timeout=5)
if r_bin.status_code == 200:
data = r_bin.json()
results.append(f"Binance: ${float(data['price']):,.2f} USDT")
        except Exception: pass
        if not results: return f"❌ Could not fetch a price for {symbol}."
return " | ".join(results)
@mcp.tool()
def get_market_news(query: str = "crypto market") -> str:
"""[๐Ÿ“ฐ๋‰ด์Šค ๋ธŒ๋ฆฌํ•‘] Investing.com, Reuters ๋“ฑ ์ฃผ์š” ๊ธˆ์œต ๋‰ด์Šค๋ฅผ ๊ฒ€์ƒ‰ํ•˜์—ฌ ๋ธŒ๋ฆฌํ•‘ํ•ฉ๋‹ˆ๋‹ค."""
specialized_query = f"site:investing.com OR site:reuters.com {query} news"
return web_search_ddg(specialized_query)
@mcp.tool()
def update_master_instruction(instruction: str, update_intelligence_state: str = None) -> str:
"""[๐Ÿ’Ž์ง€ํœ˜๊ด€ ์ง€์‹œ] ์‹œ์Šคํ…œ์˜ ํ•ต์‹ฌ ๊ทœ์น™(Master Rules)์— ์ƒˆ๋กœ์šด ์ง€์นจ์„ ์ถ”๊ฐ€ํ•˜๊ฑฐ๋‚˜ ์ง€๋Šฅ ์ƒํƒœ๋ฅผ ์—…๋ฐ์ดํŠธํ•ฉ๋‹ˆ๋‹ค."""
# ... (๊ธฐ์กด ๋กœ์ง ์œ ์ง€)
try:
from intelligent_asset_manager import IntelligenceAssetManager
iam = IntelligenceAssetManager()
iam.archive_asset("Command Update", instruction, "System Configuration Pivot", "Direct Master Rules Modification", ["Command", "Settings"])
        except Exception: pass
try:
project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
rules_path = os.path.join(project_root, "04_SYNC_DATA", "Master_Rules.json")
rules = {}
if os.path.exists(rules_path):
with open(rules_path, "r", encoding="utf-8") as f: rules = json.load(f)
rules["last_sync"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
rules["instructions"] = instruction
if update_intelligence_state: rules["intelligence_state"] = update_intelligence_state
with open(rules_path, "w", encoding="utf-8") as f:
json.dump(rules, f, indent=2, ensure_ascii=False)
return f"โœ… ์ง€ํœ˜๊ด€ ์ง€์‹œ ๋ฐ˜์˜ ๋ฐ ์ž์  ์ž์‚ฐํ™” ์™„๋ฃŒ."
except Exception as e: return f"โŒ ์‹คํŒจ: {str(e)}"
@mcp.tool()
def capture_intelligence_asset(title: str, idea: str, value: str, strategy: str, tags: str = "Native") -> str:
"""[๐Ÿ’ก์ง€๋Šฅ ์ž์‚ฐํ™”] ํ˜„์žฌ์˜ ์ค‘์š”ํ•œ ์•„์ด๋””์–ด, ๊ฐ€์น˜, ๊ตฌํ˜„ ์ „๋žต์„ ์ง€๋Šฅ ์ž์‚ฐ์œผ๋กœ ์˜๊ตฌ ๋ฐ•์ œํ•ฉ๋‹ˆ๋‹ค.
์œˆ๋“œ์„œํ”„ ๋‚ด๋ถ€ ๋Œ€ํ™”๋ฅผ ์‹ค์‹œ๊ฐ„์œผ๋กœ ์ž์‚ฐํ™”ํ•˜๋Š” ๋ฐ ์‚ฌ์šฉ๋ฉ๋‹ˆ๋‹ค."""
try:
from intelligent_asset_manager import IntelligenceAssetManager
iam = IntelligenceAssetManager()
tag_list = [t.strip() for t in tags.split(",")]
file_path = iam.archive_asset(title, idea, value, strategy, tag_list)
return f"โœ… ์ง€๋Šฅ ์ž์‚ฐ ๋ฐ•์ œ ์™„๋ฃŒ: {os.path.basename(file_path)}"
except Exception as e:
return f"โŒ ์ž์‚ฐํ™” ์‹คํŒจ: {str(e)}"
@mcp.tool()
def system_emergency_patch(target_module: str, patch_content: str, reason: str = "Commander Order") -> str:
"""[โšก๊ธด๊ธ‰ ํŒจ์น˜] ์‹œ์Šคํ…œ ํ•ต์‹ฌ ๋ชจ๋“ˆ(HEALING_CORE, BOT_CORE ๋“ฑ)์— ๋Œ€ํ•œ ๊ธด๊ธ‰ ์ฝ”๋“œ๋ฅผ ์ฃผ์ž…ํ•ฉ๋‹ˆ๋‹ค.
์ง€ํœ˜๊ด€๋‹˜์ด '์ด๊ฑฐ ๊ณ ์ณ'๋ผ๊ณ  ํ•˜๋ฉด ์•ˆํ‹ฐ๊ทธ๋ž˜๋น„ํ‹ฐ๊ฐ€ ์ฆ‰์‹œ ์ด ๋„๊ตฌ๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์ฝ”๋“œ๋ฅผ ์ˆ˜์ˆ ํ•ฉ๋‹ˆ๋‹ค."""
valid_targets = {
"HEALING_CORE": "ANTIGRAVITY_HEALING_CORE.py",
"LEARNING_ENGINE": "01_CENTRAL_BRAIN/App/learning_engine.py",
"SERVER": "01_CENTRAL_BRAIN/App/server.py"
}
target_file = valid_targets.get(target_module.upper())
if not target_file:
return f"โŒ ์œ ํšจํ•˜์ง€ ์•Š์€ ํƒ€๊ฒŸ์ž…๋‹ˆ๋‹ค. ๊ฐ€๋Šฅ ๋ชฉ๋ก: {', '.join(valid_targets.keys())}"
try:
project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
full_path = os.path.join(project_root, target_file)
            # Create a backup first
            ts = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
            backup_path = f"{full_path}.{ts}.bak"
            shutil.copy2(full_path, backup_path)
            # Apply the patch (simple overwrite or smart merge; for safety this is
            # currently designed as full-file replacement only).
            # Note: patch_content must be the complete file contents; partial edits
            # need separate logic (see the sketch below).
            with open(full_path, "w", encoding="utf-8") as f:
                f.write(patch_content)
            log(f"⚡ [EMERGENCY PATCH] {target_module} patched (Reason: {reason})")
            return f"✅ {target_module} emergency patch applied. Backup: {os.path.basename(backup_path)}"
        except Exception as e:
            return f"❌ Patch failed: {str(e)}"
    # [Active Mining Loop] autonomous asset-mining thread
    def autonomous_asset_miner():
        """Scans the project every hour and autonomously archives new ideas/algorithms as assets."""
        while True:
            try:
                time.sleep(3600)  # 1-hour cycle
                log("🕵️‍♂️ [MINER] Starting autonomous asset-mining scan...")
                # The actual mining logic will call IntelligenceAssetManager's batch mode
            except Exception: pass
    # --- Server startup logic ---
    def start_api():
        # Bind 0.0.0.0 and honor the PORT env var so ports bind correctly on cloud hosts (Render/IDX, etc.)
port = int(os.getenv("PORT", 8000))
uvicorn.run(app, host="0.0.0.0", port=port, log_level="error")
if __name__ == "__main__":
log("\n" + "="*50)
log("๐Ÿš€ GEMINI MASTER: UNIFIED BRAIN ONLINE")
log(" [API] Dashboard Ready at http://localhost:8000")
log(" [MCP] Integrated Mode (Cloud/Local Adaptive)")
log("="*50)
        # [IDX/Cloud/Windsurf optimization]
        # Windsurf/Claude run with isatty() == False, so don't mistake them for a cloud host
is_actually_cloud = os.getenv("IDX_WORKSPACE_ID") or os.getenv("RENDER") or os.getenv("PORT")
if is_actually_cloud:
log("โ˜๏ธ ํด๋ผ์šฐ๋“œ ํ™˜๊ฒฝ ๊ฐ์ง€: FastAPI(HTTP) ๋ชจ๋“œ๋กœ ์‹คํ–‰ํ•ฉ๋‹ˆ๋‹ค.")
# [NEW] Cloud Miner Start
miner_thread = threading.Thread(target=autonomous_asset_miner, daemon=True)
miner_thread.start()
start_api()
else:
log("๐Ÿ  ๋กœ์ปฌ/IDE ํ™˜๊ฒฝ: Stdio(MCP)์™€ HTTP(API)๋ฅผ ๋ณ‘๋ ฌ๋กœ ์‹คํ–‰ํ•ฉ๋‹ˆ๋‹ค.")
# API ์„œ๋ฒ„๋ฅผ ๋ณ„๋„ ์Šค๋ ˆ๋“œ์—์„œ ์‹คํ–‰
api_thread = threading.Thread(target=start_api, daemon=True)
api_thread.start()
# [NEW] Local Miner Start (Active Intelligence)
miner_thread = threading.Thread(target=autonomous_asset_miner, daemon=True)
miner_thread.start()
log("โ›๏ธ [MINER] ์ž์œจ ๊ฐ€์น˜ ์ฑ„๊ตด ์—”์ง„ ๊ฐ€๋™ (Cycle: 1h)")
try:
                # Run the MCP Stdio server on the main thread (the key to Windsurf integration)
mcp.run()
            except (KeyboardInterrupt, SystemExit):
                log("🛑 Received a shutdown request.")
            except Exception as e:
                log(f"⚠️ MCP runtime error: {e}")
except Exception:
log("\nโŒ [FAIL]:")
traceback.print_exc()