|
|
import os |
|
|
import sys |
|
|
import traceback |
|
|
import re |
|
|
import shutil |
|
|
import subprocess |
|
|
import datetime |
|
|
import json |
|
|
import time |
|
|
import asyncio |
|
|
import threading |
|
|
import requests |
|
|
|
|
|
|
|
|
SERVER_START_TIME = time.time() |
|
|
|
|
|
|
|
|
# Force UTF-8 on stdio so the Korean log output survives Windows code pages.
# Replaced streams (e.g. under test capture) may not support reconfigure(),
# hence the guard.  Was a bare `except:` which would also swallow
# SystemExit/KeyboardInterrupt; narrowed to the errors reconfigure can raise.
try:
    sys.stderr.reconfigure(encoding='utf-8')
    sys.stdout.reconfigure(encoding='utf-8')
except (AttributeError, ValueError, OSError):
    pass  # best-effort: fall back to the platform default encoding

print("DEBUG: server.py ํ์ผ์ด ์คํ๋์์ต๋๋ค.", file=sys.stderr, flush=True)
|
|
|
|
|
|
|
|
try: |
|
|
def log(msg):
    """Write a timestamped message to stderr and append it to server_log.txt.

    File logging is best-effort: an unwritable log file must never crash the
    server.  Was a bare ``except:``; narrowed to OS-level I/O errors so real
    bugs (e.g. TypeError) are no longer hidden.
    """
    timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    print(f"[{timestamp}] {msg}", file=sys.stderr, flush=True)
    try:
        with open("server_log.txt", "a", encoding="utf-8") as f:
            f.write(f"[{timestamp}] {msg}\n")
    except OSError:
        pass  # console output already happened; skip the file copy
|
|
|
|
|
def auto_save_safety_backup():
    """Copy the critical files (chat history, .env) into a safety vault.

    The destination defaults to ``~/Gemini_Safety_Backup`` and can be
    overridden with the SAFETY_BACKUP_DIR environment variable.  Each source
    is looked up in the current directory first, then one level up.

    Returns True when the backup pass completed (even if no source file was
    present), False on any unexpected error.
    """
    try:
        safety_root = os.getenv(
            "SAFETY_BACKUP_DIR",
            os.path.join(os.path.expanduser("~"), "Gemini_Safety_Backup"),
        )
        # exist_ok avoids the check-then-create race of the original
        # `if not exists: makedirs` pattern.
        os.makedirs(safety_root, exist_ok=True)

        ts = datetime.datetime.now().strftime("%Y%m%d_%H%M")

        src = "chat_history.json"
        if os.path.exists(os.path.join("..", src)):
            src = os.path.join("..", src)
        if os.path.exists(src):
            shutil.copy2(src, os.path.join(safety_root, f"chat_history_{ts}.json"))

        src_env = ".env"
        if os.path.exists(os.path.join("..", ".env")):
            src_env = os.path.join("..", ".env")
        if os.path.exists(src_env):
            shutil.copy2(src_env, os.path.join(safety_root, f".env_{ts}"))

        return True
    except Exception:
        # Backups must never take the server down; signal failure via return.
        return False
|
|
|
|
|
def log_shared_chat(role, content, model_type="unknown"):
    """Append one entry to the chat log shared across every IDE.

    The log lives at SHARED_CHAT_PATH (default ``shared_chat_history.json``),
    content is truncated to 1500 characters, and only the 100 most recent
    entries are kept.
    """
    try:
        actual_path = os.getenv("SHARED_CHAT_PATH", "shared_chat_history.json")

        # Load what is already there; a missing or corrupt file yields [].
        history = []
        if os.path.exists(actual_path):
            with open(actual_path, "r", encoding="utf-8") as fh:
                try:
                    history = json.load(fh)
                except:
                    history = []

        history.append({
            "timestamp": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            "ide": os.getenv("IDE_NAME", "Gemini_Master_Node"),
            "model": model_type,
            "role": role,
            "content": content[:1500],
        })

        # Bound the file: keep only the newest 100 entries.
        if len(history) > 100:
            history = history[-100:]

        with open(actual_path, "w", encoding="utf-8") as fh:
            json.dump(history, fh, ensure_ascii=False, indent=2)
    except Exception as e:
        log(f"โ ๏ธ ํตํฉ ๋ก๊ทธ ๊ธฐ๋ก ์คํจ: {e}")
|
|
|
|
|
log(f"๐ ํ์ฌ ์์น: {os.getcwd()}") |
|
|
log("๐งฉ [Expert] SWE-bench ๋ฐ ์์จ ์์ด์ ํธ ๋๊ตฌ ์ง์ ๋ชจ๋ ํ์ฑํ") |
|
|
|
|
|
|
|
|
|
|
|
from dotenv import load_dotenv |
|
|
|
|
|
def discover_and_load_envs():
    """Walk up to four directory levels starting at this file's folder and
    load every ``.env`` / ``GLOBAL_CONFIG.env`` encountered (later files
    override earlier values).  Returns the list of loaded file paths."""
    loaded = []
    directory = os.path.dirname(os.path.abspath(__file__))
    for _level in range(4):
        for candidate in (".env", "GLOBAL_CONFIG.env"):
            env_path = os.path.join(directory, candidate)
            if os.path.exists(env_path):
                load_dotenv(env_path, override=True)
                loaded.append(env_path)
        directory = os.path.dirname(directory)  # step one level up
    return loaded
|
|
|
|
|
envs = discover_and_load_envs() |
|
|
log(f"โ
์ค์ ํ์ผ ๋ก๋ ์๋ฃ: {len(envs)}๊ฐ ํ์ผ ๋ฐ๊ฒฌ") |
|
|
|
|
|
|
|
|
REQUIRED_KEYS = ["OPENROUTER_API_KEY", "RENDER_API_KEY", "SUPABASE_KEY", "HUGGINGFACE_TOKEN"] |
|
|
active_keys = [k for k in REQUIRED_KEYS if os.getenv(k)] |
|
|
log(f"๐ [SYSTEM] Active Cloud Keys: {len(active_keys)}/{len(REQUIRED_KEYS)} Loaded.") |
|
|
|
|
|
|
|
|
os.environ["OPENROUTER_API_KEY"] = os.getenv("OPENROUTER_API_KEY", "") |
|
|
os.environ["HUGGINGFACE_API_KEY"] = os.getenv("HUGGINGFACE_TOKEN", "") |
|
|
os.environ["GROQ_API_KEY"] = os.getenv("GROQ_API_KEY", "") |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
SYSTEM_MODEL_MAP = { |
|
|
"fast": ["groq/llama3-8b-8192", "openrouter/meta-llama/llama-3-8b-instruct", "ollama/llama3"], |
|
|
"smart": ["groq/llama3-70b-8192", "openrouter/meta-llama/llama-3-70b-instruct", "ollama/qwen:14b"], |
|
|
"coding": ["groq/gemma2-9b-it", "openrouter/google/gemma-2-9b-it", "ollama/codegemma"] |
|
|
} |
|
|
|
|
|
log("๐ [1๋จ๊ณ] ๋ผ์ด๋ธ๋ฌ๋ฆฌ ๋ก๋ฉ ์ค...") |
|
|
|
|
|
from fastmcp import FastMCP |
|
|
try: |
|
|
import litellm |
|
|
from litellm import completion |
|
|
except ImportError: |
|
|
log("โ ๏ธ litellm missing, installing...") |
|
|
subprocess.check_call([sys.executable, "-m", "pip", "install", "litellm"]) |
|
|
from litellm import completion |
|
|
|
|
|
log("๐ [๋จ๊ณ] DuckDuckGo & Tavily ๋ก๋ฉ ์ค...") |
|
|
try: |
|
|
from duckduckgo_search import DDGS |
|
|
except ImportError: |
|
|
DDGS = None |
|
|
try: |
|
|
from tavily import TavilyClient |
|
|
except ImportError: |
|
|
TavilyClient = None |
|
|
|
|
|
from fastapi import FastAPI, Request |
|
|
from fastapi.middleware.cors import CORSMiddleware |
|
|
import uvicorn |
|
|
|
|
|
import learning_engine |
|
|
|
|
|
|
|
|
# Rule storage lives next to this file, partitioned by category.
RULES_DIR = os.path.join(os.path.dirname(__file__), "Rules")
# Bug fix: the original only created the category subfolders when RULES_DIR
# itself was missing, so a partially-created tree was never repaired.
# makedirs(exist_ok=True) is idempotent and race-free.
for _category in ("Trading", "Indicators", "Patterns"):
    os.makedirs(os.path.join(RULES_DIR, _category), exist_ok=True)
|
|
|
|
|
|
|
|
try: |
|
|
project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) |
|
|
sys.path.append(project_root) |
|
|
import system_blackbox |
|
|
log("๐ผ [BLACKBOX] Flight Recorder Armed & Recording...") |
|
|
except ImportError: |
|
|
log("โ ๏ธ [BLACKBOX] Recorder not found, flying without blackbox.") |
|
|
|
|
|
except Exception as e: |
|
|
print(f"\nโ [์ค๋ฅ] ๋ผ์ด๋ธ๋ฌ๋ฆฌ ๋ก๋ฉ ์ค ์น๋ช
์ ์ค๋ฅ: {e}") |
|
|
traceback.print_exc() |
|
|
sys.exit(1) |
|
|
|
|
|
|
|
|
app = FastAPI(title="Gemini Master Hub API") |
|
|
app.add_middleware( |
|
|
CORSMiddleware, |
|
|
allow_origins=["*"], |
|
|
allow_methods=["*"], |
|
|
allow_headers=["*"], |
|
|
) |
|
|
|
|
|
|
|
|
mcp = FastMCP("Gemini Server") |
|
|
|
|
|
try: |
|
|
|
|
|
log(f"โ
ํ๊ฒฝ ์ค์ ์ต์ ํ ์๋ฃ") |
|
|
|
|
|
|
|
|
|
|
|
api_inventory = {"google": [], "xai": [], "openai": [], "perplexity": [], "groq": [], "anthropic": [], "openrouter": []} |
|
|
|
|
|
|
|
|
loaded_envs_files = [os.path.join(os.getcwd(), f) for f in [".env", "GLOBAL_CONFIG.env"] if os.path.exists(f)] |
|
|
|
|
|
for e_path in loaded_envs_files: |
|
|
try: |
|
|
with open(e_path, "r", encoding="utf-8") as f: content = f.read() |
|
|
except: |
|
|
continue |
|
|
|
|
|
for k in api_inventory.keys(): |
|
|
if k == "google": |
|
|
api_inventory[k] += re.findall(r'AIza[0-9A-Za-z\-_]{30,}', content) |
|
|
elif k == "openrouter": |
|
|
api_inventory[k] += re.findall(r'OPENROUTER_API_KEY=(sk-or-v1-[0-9A-Za-z\-_]+)', content) |
|
|
else: |
|
|
prefix = {"xai": "xai-", "openai": "sk-", "groq": "gsk_"}.get(k, "") |
|
|
if prefix: |
|
|
api_inventory[k] += re.findall(rf'{k.upper()}_API_KEY=({prefix}[0-9A-Za-z\-_]+)', content) |
|
|
|
|
|
google_keys = list(set(api_inventory["google"])) |
|
|
xai_keys = list(set(api_inventory["xai"])) |
|
|
log(f"โ
AI ์์ ํ๋ณด: Google({len(google_keys)}), Grok({len(xai_keys)})") |
|
|
|
|
|
|
|
|
@app.post("/api/generate_rules_by_keyword")
async def api_generate_rules(request: Request):
    """Generate a trading-rule file for a keyword via an LLM.

    Body: ``{"keyword", "model", "category"}``.  The model is asked for a
    JSON rule set; on a clean parse it is saved as ``<keyword>_Rules.json``
    under ``RULES_DIR/<category>``, otherwise the raw model text is kept as
    a ``_Raw.md`` fallback and ``partial_success`` is returned.
    """
    try:
        data = await request.json()
        kw = data.get("keyword", "AUTO")
        model = data.get("model", "gemini-2.0-flash")
        category = data.get("category", "Trading")

        log(f"๐ญ ๊ท์น ์์ฑ ์์ฒญ: {kw} (Model: {model}, Category: {category})")

        prompt = (
            f"'{kw}'์ ๋ํ ์ ๋ฌธ์ ์ธ ํธ๋ ์ด๋ฉ ๊ท์น์ JSON ํ์์ผ๋ก ์์ฑํด์ค. "
            "ํฌํจํ ๋ด์ฉ: ์ง์ ์กฐ๊ฑด(entry_conditions), ์ฒญ์ฐ์กฐ๊ฑด(exit_conditions), "
            "์์ ๊ฐ(stop_loss), ์ต์ ๊ฐ(take_profit), ์ฃผ์์ฌํญ(risk_factors)."
        )
        result = ask_any_model(prompt, model)

        try:
            # Strip the markdown code fences models often wrap JSON in.
            clean_json = re.sub(r'```json\s*|\s*```', '', result).strip()
            rule_data = json.loads(clean_json)

            filename = f"{kw.replace(' ', '_')}_Rules.json"
            filepath = os.path.join(RULES_DIR, category, filename)
            with open(filepath, "w", encoding="utf-8") as f:
                json.dump(rule_data, f, indent=2, ensure_ascii=False)

            log(f"โ ๊ท์น ํ์ผ ์์ฑ ์๋ฃ: {filepath}")
            # Bug fix: the success message used to contain the literal
            # placeholder "(unknown)" instead of the saved filename.
            return {"status": "success", "message": f"๊ท์น ์์ฑ ์๋ฃ: {filename}", "data": rule_data}

        except Exception as e:
            log(f"โ ๏ธ JSON ๋ณํ ์คํจ, ํ ์คํธ๋ก ์ ์ฅ: {e}")
            filepath = os.path.join(RULES_DIR, category, f"{kw.replace(' ', '_')}_Raw.md")
            with open(filepath, "w", encoding="utf-8") as f:
                f.write(result)
            return {"status": "partial_success", "message": "JSON ํ์ฑ ์คํจ, ํ ์คํธ๋ก ์ ์ฅ๋จ", "data": result}

    except Exception as e:
        return {"status": "error", "message": str(e)}
|
|
|
|
|
@app.get("/api/get_file_content")
async def api_get_file(path: str):
    """Return a file's text content.

    Security: relative paths are resolved against the project root and are
    now rejected when they escape it — the original ':'-based check only
    detected Windows drive prefixes and let '..' traversal through, despite
    the docstring promising confinement.  Absolute / drive-prefixed paths
    are still honoured for backward compatibility.
    """
    try:
        project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))

        if os.path.isabs(path) or ":" in path:
            # Preserve the original behavior for explicit absolute paths.
            full_path = path
        else:
            full_path = os.path.abspath(os.path.join(project_root, path))
            root = os.path.abspath(project_root)
            # Reject '..' escapes out of the project tree.
            if os.path.commonpath([root, full_path]) != root:
                return {"status": "error", "message": f"Path outside project: {path}"}

        if os.path.exists(full_path):
            with open(full_path, "r", encoding="utf-8") as f:
                return {"status": "success", "content": f.read()}
        return {"status": "error", "message": f"File not found: {path}"}
    except Exception as e:
        return {"status": "error", "message": str(e)}
|
|
|
|
|
@app.post("/api/save_custom_file")
async def api_save_file(request: Request):
    """Write text content to a file; relative paths resolve to the project root.

    Body: ``{"path": <str>, "content": <str>}``.  Parent directories are
    created as needed.  ``os.path.isabs`` replaces the original Windows-only
    ``startswith("C:")`` test, so absolute POSIX paths and other drive
    letters are recognized too; a bare filename no longer crashes
    ``os.makedirs("")``.
    """
    try:
        data = await request.json()
        path = data.get("path")
        content = data.get("content")

        if os.path.isabs(path) or ":" in path:
            full_path = path
        else:
            project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
            full_path = os.path.join(project_root, path)

        parent = os.path.dirname(full_path)
        if parent:  # dirname is "" for a bare filename in the cwd
            os.makedirs(parent, exist_ok=True)
        with open(full_path, "w", encoding="utf-8") as f:
            f.write(content)
        return {"status": "success", "message": f"Saved to {path}"}
    except Exception as e:
        return {"status": "error", "message": str(e)}
|
|
|
|
|
@app.post("/api/execute_command")
async def api_run_cmd(request: Request):
    """Run a shell command in a new console window (e.g. VPN start, restart).

    Body: ``{"command": <str>, "cwd": <dir relative to project root, default ".">}``.
    NOTE(review): the command is interpolated straight into a shell string
    with ``shell=True`` — this endpoint allows arbitrary command injection
    and must only ever be reachable by trusted callers.
    """
    try:
        data = await request.json()
        cmd = data.get("command")
        cwd = data.get("cwd", ".")

        # Resolve the working directory against the project root (3 levels up).
        project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
        full_cwd = os.path.join(project_root, cwd)

        # 'start cmd /k' opens a detached console that stays open — Windows-only.
        subprocess.Popen(f"start cmd /k {cmd}", cwd=full_cwd, shell=True)
        return {"status": "success", "message": f"Command '{cmd}' executed in {cwd}"}
    except Exception as e:
        return {"status": "error", "message": str(e)}
|
|
|
|
|
|
|
|
@app.post("/api/trade/record") |
|
|
async def api_record_trade(request: Request): |
|
|
"""๋งค๋งค ์ง์
๊ธฐ๋ก""" |
|
|
try: |
|
|
data = await request.json() |
|
|
res = learning_engine.record_trade( |
|
|
data.get("symbol"), |
|
|
data.get("action"), |
|
|
|
|
|
data.get("reason"), |
|
|
|
|
|
data.get("price", "Market") |
|
|
|
|
|
) |
|
|
|
|
|
return {"status": "success", "message": res} |
|
|
|
|
|
except Exception as e: |
|
|
|
|
|
return {"status": "error", "message": str(e)} |
|
|
|
|
|
|
|
|
|
|
|
@app.post("/api/trade/feedback") |
|
|
|
|
|
async def api_trade_feedback(request: Request): |
|
|
|
|
|
"""๋งค๋งค ๊ฒฐ๊ณผ ๋ฐ ํผ๋๋ฐฑ ์ ์ฅ""" |
|
|
|
|
|
try: |
|
|
|
|
|
data = await request.json() |
|
|
|
|
|
res = learning_engine.update_trade_result( |
|
|
|
|
|
data.get("symbol"), |
|
|
|
|
|
data.get("result"), |
|
|
|
|
|
data.get("feedback") |
|
|
|
|
|
) |
|
|
|
|
|
return {"status": "success", "message": res} |
|
|
|
|
|
except Exception as e: |
|
|
|
|
|
return {"status": "error", "message": str(e)} |
|
|
|
|
|
|
|
|
|
|
|
@app.get("/api/trade/lessons") |
|
|
|
|
|
async def api_get_lessons(symbol: str = "ALL"): |
|
|
|
|
|
"""๋งค๋งค ๊ตํ ์กฐํ""" |
|
|
|
|
|
try: |
|
|
|
|
|
res = learning_engine.get_trading_lessons(symbol) |
|
|
|
|
|
return {"status": "success", "data": res} |
|
|
|
|
|
except Exception as e: |
|
|
|
|
|
return {"status": "error", "message": str(e)} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/api/stats") |
|
|
|
|
|
async def get_brain_stats(): |
|
|
|
|
|
"""ํธ๋ ์ด๋ฉ ๋ธ๋ ์ธ์ ํ์ฌ ์ค์๊ฐ ์ํ(IQ, ๊ท์น ์) ๋ฐํ""" |
|
|
|
|
|
try: |
|
|
|
|
|
|
|
|
|
|
|
rule_dir = "04_SYNC_DATA/LEARNED_RULES" |
|
|
|
|
|
rules = [f for f in os.listdir(rule_dir) if f.endswith('.json')] if os.path.exists(rule_dir) else [] |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
target_file = "04_SYNC_DATA/TARGET_ASSETS.json" |
|
|
|
|
|
targets = [] |
|
|
|
|
|
if os.path.exists(target_file): |
|
|
|
|
|
with open(target_file, "r") as f: |
|
|
|
|
|
targets = json.load(f) |
|
|
|
|
|
|
|
|
|
|
|
return { |
|
|
|
|
|
"status": "online", |
|
|
|
|
|
"brain_iq": len(rules) * 15 + 100, |
|
|
|
|
|
"total_rules": len(rules), |
|
|
|
|
|
"active_targets": [t.get('symbol') for t in targets], |
|
|
|
|
|
"last_update": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") |
|
|
|
|
|
} |
|
|
|
|
|
except Exception as e: |
|
|
|
|
|
return {"status": "error", "message": str(e)} |
|
|
|
|
|
|
|
|
|
|
|
@app.post("/api/chat") |
|
|
|
|
|
async def api_chat(request: Request): |
|
|
|
|
|
try: |
|
|
|
|
|
data = await request.json() |
|
|
|
|
|
msg = data.get("message", "") |
|
|
|
|
|
model = data.get("model", "gemini-2.0-flash") |
|
|
|
|
|
mode = data.get("mode", "chat") |
|
|
|
|
|
|
|
|
|
|
|
if not msg: return {"status": "error", "message": "No message provided"} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
system_prompt = "You are Gemini Master, the supreme orchestrator of all integrated platforms (Firebase, Vercel, Supabase, Notion, etc.)." |
|
|
|
|
|
|
|
|
|
|
|
if mode == "trade": |
|
|
|
|
|
system_prompt = "You are the Trading Sentinel. Accessing live market data and Supabase logs." |
|
|
|
|
|
|
|
|
|
|
|
prices = get_crypto_price("BTC") |
|
|
|
|
|
wisdom = learning_engine.get_trading_lessons("ALL") |
|
|
|
|
|
msg = f"[Live Market]: {prices}\n[Context: Trading Wisdom]\n{wisdom}\n\n[User Request]: {msg}" |
|
|
|
|
|
|
|
|
|
|
|
elif mode == "search": |
|
|
|
|
|
system_prompt = "You are the Quantum Search engine. Utilizing DuckDuckGo, Tavily, and Google Search." |
|
|
|
|
|
search_results = web_search_ddg(msg) |
|
|
|
|
|
msg = f"[Web Search Results]:\n{search_results}\n\n[User Request]: {msg}" |
|
|
|
|
|
|
|
|
|
|
|
elif mode == "app": |
|
|
|
|
|
app_id = data.get("app_id", "general") |
|
|
|
|
|
app_prompts = { |
|
|
|
|
|
"coder": "You are a Senior Software Engineer specializing in GitHub Actions, Codespaces, and Cursor SSH.", |
|
|
|
|
|
"analyst": "You are a Market Analyst. Use Supabase data and exchange APIs for real-time insights.", |
|
|
|
|
|
"notion": "You are a Knowledge Manager. Help organize thoughts into Notion databases and rules.", |
|
|
|
|
|
"cloud_master": "You are the Cloud Architect. Manage Vercel, Render, and Firebase deployments.", |
|
|
|
|
|
"trader_alpha": "You are an Elite Algorithmic Trader. Specialized in high-frequency trading rules." |
|
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
if app_id == "analyst": |
|
|
|
|
|
news = get_market_news("global market") |
|
|
|
|
|
|
|
|
|
|
|
try: |
|
|
|
|
|
import glob |
|
|
|
|
|
rule_files = glob.glob("04_SYNC_DATA/LEARNED_RULES/*.json") |
|
|
|
|
|
loaded_rules = [] |
|
|
|
|
|
for rf in rule_files[-3:]: |
|
|
|
|
|
with open(rf, "r", encoding="utf-8") as f: |
|
|
|
|
|
loaded_rules.append(json.load(f)) |
|
|
|
|
|
rules_context = json.dumps(loaded_rules, ensure_ascii=False, indent=1) |
|
|
|
|
|
except: rules_context = "No rules yet." |
|
|
|
|
|
|
|
|
|
|
|
msg = f"[Context: Real-time News]\n{news}\n\n[Context: Auto-Generated Rules]\n{rules_context}\n\n[User Request]: {msg}" |
|
|
|
|
|
|
|
|
|
|
|
system_prompt = app_prompts.get(app_id, "You are a specialized AI assistant.") |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
response = ask_any_model(msg, model) |
|
|
|
|
|
return {"status": "success", "response": response, "mode": mode} |
|
|
|
|
|
except Exception as e: |
|
|
|
|
|
return {"status": "error", "message": str(e)} |
|
|
|
|
|
|
|
|
|
|
|
@app.get("/api/apps/list") |
|
|
|
|
|
async def api_list_apps(): |
|
|
|
|
|
"""์ง์๋๋ AI ์ฑ/๋ชจ๋ ๋ฆฌ์คํธ ๋ฐํ""" |
|
|
|
|
|
return { |
|
|
|
|
|
"status": "success", |
|
|
|
|
|
"apps": [ |
|
|
|
|
|
{"id": "chat", "name": "Standard Chat", "icon": "๐ฌ", "desc": "Universal Intelligence"}, |
|
|
|
|
|
{"id": "trade", "name": "Trading Sentinel", "icon": "๐", "desc": "Market & Exchange Hub"}, |
|
|
|
|
|
{"id": "search", "name": "Quantum Search", "icon": "๐", "desc": "Tavily + DDG Global Search"}, |
|
|
|
|
|
{"id": "app", "name": "App: Cloud Architect", "icon": "โ๏ธ", "app_id": "cloud_master"}, |
|
|
|
|
|
{"id": "app", "name": "App: Code Expert (SSH)", "icon": "๐ป", "app_id": "coder"}, |
|
|
|
|
|
{"id": "app", "name": "App: Notion Agent", "icon": "๐", "app_id": "notion"}, |
|
|
|
|
|
{"id": "app", "name": "App: Market Analyst", "icon": "๐", "app_id": "analyst"}, |
|
|
|
|
|
{"id": "app", "name": "App: Elite Trader", "icon": "๐ฆ
", "app_id": "trader_alpha"} |
|
|
|
|
|
] |
|
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
@app.get("/api/debate/suggest")
async def api_debate_suggest():
    """Ask the default model for three sharp, innovative debate topics for
    the 'Hive-Mind Council', framed around the trading-system build-out."""
    prompt = "ํ์ฌ ์ธ๊ณต์ง๋ฅ ํธ๋ ์ด๋ฉ ์์คํ ์ ๊ตฌ์ถ ์ค์ด์ผ. ์ฐ๋ฆฌ '๋ฅ๋ง์ธ๋ ์์ํ(The Council)'๊ฐ ํ ๋ก ํ ๋งํ ์์ฃผ ๋ ์นด๋กญ๊ณ ํ์ ์ ์ธ ์ฃผ์ 3๊ฐ์ง๋ง ์ ์ํด์ค. (์: ๊ธฐ์ ์ ๋ถ์ vs ์จ์ฒด์ธ ๋ฐ์ดํฐ, ํํธ ์ ๋ต์ ํ๊ณ ๋ฑ)"
    try:
        suggestion = ask_any_model(prompt, "gemini-2.0-flash")
        return {"status": "success", "suggestions": suggestion}
    except Exception:
        # Was a bare `except:` (would even swallow SystemExit).
        return {"status": "error", "message": "Failed to generate suggestions"}
|
|
|
|
|
|
|
|
|
|
|
@app.get("/api/insights/latest")
async def api_get_insights(refresh: bool = False):
    """Return the autonomous analyst's latest insight.

    A fresh insight is generated when ``refresh`` is requested or no cached
    insights file exists; otherwise the cached file is served.
    """
    from .autonomous_analyst import generate_autonomous_insight, INSIGHTS_FILE

    if refresh or not os.path.exists(INSIGHTS_FILE):
        insight = generate_autonomous_insight()
        if not insight:
            return {"status": "error", "message": "Analysis failed"}
        return {"status": "success", "data": insight}

    try:
        with open(INSIGHTS_FILE, "r", encoding="utf-8") as f:
            return {"status": "success", "data": json.load(f)}
    except (OSError, json.JSONDecodeError):
        # Was a bare `except:`; only I/O and parse failures are expected here.
        return {"status": "error", "message": "Read error"}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@mcp.tool()
def get_shared_history(limit: int = 15) -> str:
    """[Unified memory] Return the most recent *limit* entries of the chat
    log shared across every IDE, one formatted line per entry."""
    log_path = "shared_chat_history.json"
    # The log may live one directory up when the server runs from App/.
    if not os.path.exists(log_path) and os.path.exists(os.path.join("..", log_path)):
        log_path = os.path.join("..", log_path)

    if not os.path.exists(log_path):
        return "๊ธฐ๋ก์ด ์์ง ์์ต๋๋ค. ๋ํ๋ฅผ ์์ํ๋ฉด ์๋์ผ๋ก ์ด๊ณณ์ ๋ชจ์ ๋๋ค."

    try:
        with open(log_path, "r", encoding="utf-8") as f:
            history = json.load(f)
        recent = history[-limit:]
        return "\n".join(
            f"[{e['timestamp']}] {e['role']} ({e['ide']}): {e['content']}"
            for e in recent
        )
    except Exception:
        # Was a bare `except:`; covers corrupt JSON or foreign-schema entries.
        return "๋ก๊ทธ๋ฅผ ์ฝ๋ ๋ฐ ์คํจํ์ต๋๋ค."
|
|
|
|
|
|
|
|
|
|
|
@mcp.tool() |
|
|
|
|
|
def list_supported_models() -> str: |
|
|
|
|
|
"""[๐๋ชจ๋ธ ๋ฆฌ์คํธ] ํ์ฌ ์์คํ
์์ ์ฌ์ฉ ๊ฐ๋ฅํ ๋ชจ๋ AI ๋ชจ๋ธ ๋ชฉ๋ก์ ๋ณด์ฌ์ค๋๋ค.""" |
|
|
|
|
|
model_list = [ |
|
|
|
|
|
"--- Google Gemini ---", |
|
|
|
|
|
"gemini-2.0-flash, gemini-2.0-pro, gemini-1.5-pro, gemini-2.5-alpha", |
|
|
|
|
|
"", |
|
|
|
|
|
"--- Anthropic Claude ---", |
|
|
|
|
|
"claude-3.5-sonnet, claude-3.5-haiku, claude-3-opus", |
|
|
|
|
|
"", |
|
|
|
|
|
"--- OpenAI GPT ---", |
|
|
|
|
|
"gpt-4o, gpt-4o-mini, o1-preview, o1-mini", |
|
|
|
|
|
"", |
|
|
|
|
|
"--- Perplexity (Search) ---", |
|
|
|
|
|
"perplexity-sonar, perplexity-reasoning", |
|
|
|
|
|
"", |
|
|
|
|
|
"--- Groq (Llama, Fast) ---", |
|
|
"llama-3.3-70b, llama-3.1-70b, mixtral-8x7b", |
|
|
"", |
|
|
"--- Hugging Face (Serverless) ---", |
|
|
"hf/meta-llama/Llama-2-7b-hf, hf/mistralai/Mistral-7B-v0.1", |
|
|
"", |
|
|
"--- Other ---", |
|
|
"grok, deepseek-v3" |
|
|
] |
|
|
return "\n".join(model_list) |
|
|
|
|
|
|
|
|
|
|
|
@mcp.tool()
def ask_any_model(question: str, model_type: str = "gemini-2.0-flash") -> str:
    """[Universal brain] Route a question to any supported AI model.

    Supported families: gemini (2.0/1.5), claude (3.5/Opus), gpt (4o/o1),
    grok, perplexity (sonar), llama (groq).  The call is retried over every
    discovered API key for the resolved provider; the first success wins.
    On total failure a Korean error string is returned (never raises).
    """
    log_shared_chat("user", question, model_type)

    # Friendly alias -> litellm provider-qualified model id.
    model_map = {
        "gemini-2.0-flash": "gemini/gemini-2.0-flash",
        "gemini-2.0-pro": "gemini/gemini-2.0-pro-exp",
        "gpt-4o": "openrouter/openai/gpt-4o",
        "claude-3.5-sonnet": "openrouter/anthropic/claude-3.5-sonnet",
        "gpt-4-turbo": "openrouter/openai/gpt-4-turbo",
        "claude-3.5-haiku": "openrouter/anthropic/claude-3.5-haiku",
        "claude-3-opus": "openrouter/anthropic/claude-3-opus",
        "grok": "openrouter/x-ai/grok-2",
        "perplexity-sonar": "openrouter/perplexity/sonar",
        "llama-3.3-70b": "groq/llama-3.3-70b-versatile"
    }

    # Unknown aliases pass through verbatim so raw litellm ids keep working.
    model_id = model_map.get(model_type, model_type)
    provider = model_id.split('/')[0] if '/' in model_id else "unknown"

    # Prepend the Master Rules file (if present) as shared context.
    context_prompt = ""
    try:
        project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
        rules_path = os.path.join(project_root, "04_SYNC_DATA", "Master_Rules.json")
        if os.path.exists(rules_path):
            with open(rules_path, "r", encoding="utf-8") as f:
                rules = json.load(f)
            context_prompt += f"\n[Master Rules]: {json.dumps(rules, ensure_ascii=False)}"
    except: pass  # context is optional; never block the call on it

    full_question = f"{context_prompt}\n\nCurrent User Request: {question}"

    # api_inventory / google_keys are built at module load from the .env scan.
    keys = api_inventory.get(provider, [])

    # Fallback: no provider-specific key -> reroute through OpenRouter,
    # rewriting the model id into OpenRouter's namespace.
    if provider == "openrouter" or (not keys and provider != "gemini"):
        or_key = os.getenv("OPENROUTER_API_KEY")
        if or_key:
            keys = [or_key]
            if not model_id.startswith("openrouter/"):
                or_map = {
                    "anthropic": "openrouter/anthropic/",
                    "openai": "openrouter/openai/",
                    "xai": "openrouter/x-ai/",
                    "google": "openrouter/google/"
                }
                if provider in or_map:
                    model_id = model_id.replace(f"{provider}/", or_map[provider])
                else:
                    model_id = f"openrouter/{model_id}"

    if not keys and provider == "gemini":
        keys = google_keys  # harvested AIza... keys from the env scan

    # Explicit "or/<vendor>/<model>" prefix forces the OpenRouter route.
    if model_type.startswith("or/"):
        model_id = "openrouter/" + model_type[3:]
        keys = api_inventory.get("openrouter", [os.getenv("OPENROUTER_API_KEY")])

    # Key rotation: try each key until one call succeeds.  With no keys at
    # all, one attempt is made with api_key=None (litellm env fallback).
    for k in (keys if keys else [""]):
        try:
            r = completion(
                model=model_id,
                messages=[
                    {"role": "system", "content": "You are Gemini Master, an autonomous AI system. You can save files by starting your response with 'SAVE_FILE:[path]' followed by the content. Example: 'SAVE_FILE:Rules/new_rule.json\n{...}'"},
                    {"role": "user", "content": full_question}
                ],
                api_key=k if k else None,
                timeout=30,
                max_tokens=1000
            )
            ans = r.choices[0].message.content

            # Agent side effect: a SAVE_FILE: header makes the server write
            # the remainder of the answer to the named project-relative path.
            if ans.startswith("SAVE_FILE:"):
                try:
                    file_info = ans.split("\n", 1)
                    header = file_info[0]
                    content = file_info[1] if len(file_info) > 1 else ""
                    target_path = header.replace("SAVE_FILE:", "").strip()

                    project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
                    full_save_path = os.path.join(project_root, target_path)

                    os.makedirs(os.path.dirname(full_save_path), exist_ok=True)
                    with open(full_save_path, "w", encoding="utf-8") as f:
                        f.write(content)
                    ans = f"โ [AGENT] File saved to: {target_path}\n\n" + ans
                except Exception as fe:
                    ans = f"โ ๏ธ [AGENT] File save failed: {fe}\n\n" + ans

            log_shared_chat("assistant", ans, model_type)
            auto_save_safety_backup()  # snapshot history after every answer
            return ans

        except Exception as e:
            log(f"โ ๏ธ {model_id} ์คํจ: {str(e)[:50]}")
            continue  # rotate to the next key

    return f"โ {model_type} ํธ์ถ ์คํจ. API ํค๋ฅผ ํ์ธํ์๊ฑฐ๋ ๋ค๋ฅธ ๋ชจ๋ธ์ ์๋ํด ์ฃผ์ธ์."
|
|
|
|
|
|
|
|
|
|
|
@mcp.tool()
def web_search_ddg(query: str) -> str:
    """Search the live web via DuckDuckGo; return up to five results as
    '[title]\\nbody\\n(url)' blocks.

    All failures — including DDGS being None when duckduckgo_search is not
    installed — are reported in the returned string rather than raised.
    """
    try:
        with DDGS() as ddgs:
            results = [r for r in ddgs.text(query, max_results=5)]
        if not results: return "๊ฒ์ ๊ฒฐ๊ณผ๊ฐ ์์ต๋๋ค."
        return "\n\n".join([f"[{r['title']}]\n{r['body']}\n({r['href']})" for r in results])
    except Exception as e: return f"๊ฒ์ ์ค ์ค๋ฅ ๋ฐ์: {e}"
|
|
|
|
|
|
|
|
|
|
|
@mcp.tool()
def web_search_tavily(query: str) -> str:
    """Run an AI-optimized web search through the Tavily API (key required).

    Requires TAVILY_API_KEY in the environment; returns at most five results
    formatted as '[title]\\ncontent\\n(url)' blocks.  Failures are reported
    in the returned string rather than raised.
    """
    tav_key = os.getenv("TAVILY_API_KEY")
    if not tav_key: return "TAVILY_API_KEY๊ฐ ์ค์ ๋์ง ์์์ต๋๋ค."
    try:
        client = TavilyClient(api_key=tav_key)
        result = client.search(query, search_depth="advanced")
        if not result.get('results'): return "๊ฒ์ ๊ฒฐ๊ณผ๊ฐ ์์ต๋๋ค."
        return "\n\n".join([f"[{r['title']}]\n{r['content']}\n({r['url']})" for r in result['results'][:5]])
    except Exception as e: return f"Tavily ๊ฒ์ ์ค ์ค๋ฅ ๋ฐ์: {e}"
|
|
|
|
|
|
|
|
|
|
|
@mcp.tool()
def read_file(path: str) -> str:
    """Read and return the UTF-8 text content of *path*; on any failure the
    exception text is returned instead of raising (MCP tool convention)."""
    try:
        with open(path, "r", encoding="utf-8") as src:
            text = src.read()
    except Exception as err:
        return str(err)
    return text
|
|
|
|
|
|
|
|
|
|
|
@mcp.tool()
def write_file(path: str, content: str) -> str:
    """Write *content* to *path* as UTF-8.

    Returns a success marker string, or the exception text on failure
    (MCP tools report errors as strings rather than raising).
    """
    try:
        with open(path, "w", encoding="utf-8") as f: f.write(content); return f"โ ์ ์ฅ ์๋ฃ: {path}"
    except Exception as e: return str(e)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@mcp.tool()
def record_new_trade(symbol: str, action: str, reason: str) -> str:
    """[Learning: record] Log a new trade entry via learning_engine.
    Example: record_new_trade('BTC', 'BUY', 'RSI 30 touched')"""
    return learning_engine.record_trade(symbol, action, reason)


@mcp.tool()
def update_trade_outcome(symbol: str, result: str, feedback: str) -> str:
    """[Learning: feedback] Grade the outcome of a recorded trade.
    Example: update_trade_outcome('BTC', 'WIN', 'Good RSI signal')"""
    return learning_engine.update_trade_result(symbol, result, feedback)


@mcp.tool()
def get_market_wisdom(symbol: str = "ALL") -> str:
    """[Learning: recall] Summarize lessons learned from past trades."""
    return learning_engine.get_trading_lessons(symbol)
|
|
|
|
|
|
|
|
|
|
|
@mcp.tool()
def get_crypto_price(symbol: str = "BTC") -> str:
    """Fetch live prices for *symbol* from Upbit (KRW) and Binance (USDT).

    No API key required.  Each exchange is queried best-effort with a 5 s
    timeout; failures are deliberately swallowed, so the result may contain
    one quote, both, or an error string when neither responded.
    """
    results = []
    try:
        upbit_symbol = f"KRW-{symbol}"
        r_up = requests.get(f"https://api.upbit.com/v1/ticker?markets={upbit_symbol}", timeout=5)
        if r_up.status_code == 200:
            data = r_up.json()[0]
            results.append(f"Upbit: {data['trade_price']:,} KRW ({data['signed_change_rate']*100:+.2f}%)")
    except: pass  # deliberate best-effort: skip Upbit on any failure

    try:
        bin_symbol = f"{symbol}USDT"
        r_bin = requests.get(f"https://api.binance.com/api/v3/ticker/price?symbol={bin_symbol}", timeout=5)
        if r_bin.status_code == 200:
            data = r_bin.json()
            results.append(f"Binance: ${float(data['price']):,.2f} USDT")
    except: pass  # deliberate best-effort: skip Binance on any failure

    if not results: return f"โ {symbol} ์์ธ๋ฅผ ๋ถ๋ฌ์ฌ ์ ์์ต๋๋ค."
    return " | ".join(results)
|
|
|
|
|
|
|
|
|
|
|
@mcp.tool()
def get_market_news(query: str = "crypto market") -> str:
    """[News briefing] Search major financial outlets (Investing.com,
    Reuters) for *query* and return the DuckDuckGo results."""
    # Constrain DuckDuckGo to the two news domains via site: operators.
    return web_search_ddg(f"site:investing.com OR site:reuters.com {query} news")
|
|
|
|
|
|
|
|
|
|
|
@mcp.tool() |
|
|
def update_master_instruction(instruction: str, update_intelligence_state: str = None) -> str: |
|
|
"""[๐์งํ๊ด ์ง์] ์์คํ
์ ํต์ฌ ๊ท์น(Master Rules)์ ์๋ก์ด ์ง์นจ์ ์ถ๊ฐํ๊ฑฐ๋ ์ง๋ฅ ์ํ๋ฅผ ์
๋ฐ์ดํธํฉ๋๋ค.""" |
|
|
|
|
|
try: |
|
|
from intelligent_asset_manager import IntelligenceAssetManager |
|
|
iam = IntelligenceAssetManager() |
|
|
iam.archive_asset("Command Update", instruction, "System Configuration Pivot", "Direct Master Rules Modification", ["Command", "Settings"]) |
|
|
except: pass |
|
|
|
|
|
try: |
|
|
project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) |
|
|
rules_path = os.path.join(project_root, "04_SYNC_DATA", "Master_Rules.json") |
|
|
rules = {} |
|
|
if os.path.exists(rules_path): |
|
|
with open(rules_path, "r", encoding="utf-8") as f: rules = json.load(f) |
|
|
rules["last_sync"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") |
|
|
rules["instructions"] = instruction |
|
|
if update_intelligence_state: rules["intelligence_state"] = update_intelligence_state |
|
|
with open(rules_path, "w", encoding="utf-8") as f: |
|
|
json.dump(rules, f, indent=2, ensure_ascii=False) |
|
|
return f"โ
์งํ๊ด ์ง์ ๋ฐ์ ๋ฐ ์์ ์์ฐํ ์๋ฃ." |
|
|
except Exception as e: return f"โ ์คํจ: {str(e)}" |
|
|
|
|
|
@mcp.tool() |
|
|
def capture_intelligence_asset(title: str, idea: str, value: str, strategy: str, tags: str = "Native") -> str: |
|
|
"""[๐ก์ง๋ฅ ์์ฐํ] ํ์ฌ์ ์ค์ํ ์์ด๋์ด, ๊ฐ์น, ๊ตฌํ ์ ๋ต์ ์ง๋ฅ ์์ฐ์ผ๋ก ์๊ตฌ ๋ฐ์ ํฉ๋๋ค. |
|
|
์๋์ํ ๋ด๋ถ ๋ํ๋ฅผ ์ค์๊ฐ์ผ๋ก ์์ฐํํ๋ ๋ฐ ์ฌ์ฉ๋ฉ๋๋ค.""" |
|
|
try: |
|
|
from intelligent_asset_manager import IntelligenceAssetManager |
|
|
iam = IntelligenceAssetManager() |
|
|
tag_list = [t.strip() for t in tags.split(",")] |
|
|
file_path = iam.archive_asset(title, idea, value, strategy, tag_list) |
|
|
return f"โ
์ง๋ฅ ์์ฐ ๋ฐ์ ์๋ฃ: {os.path.basename(file_path)}" |
|
|
except Exception as e: |
|
|
return f"โ ์์ฐํ ์คํจ: {str(e)}" |
|
|
|
|
|
@mcp.tool() |
|
|
def system_emergency_patch(target_module: str, patch_content: str, reason: str = "Commander Order") -> str: |
|
|
"""[โก๊ธด๊ธ ํจ์น] ์์คํ
ํต์ฌ ๋ชจ๋(HEALING_CORE, BOT_CORE ๋ฑ)์ ๋ํ ๊ธด๊ธ ์ฝ๋๋ฅผ ์ฃผ์
ํฉ๋๋ค. |
|
|
์งํ๊ด๋์ด '์ด๊ฑฐ ๊ณ ์ณ'๋ผ๊ณ ํ๋ฉด ์ํฐ๊ทธ๋๋นํฐ๊ฐ ์ฆ์ ์ด ๋๊ตฌ๋ฅผ ์ฌ์ฉํ์ฌ ์ฝ๋๋ฅผ ์์ ํฉ๋๋ค.""" |
|
|
valid_targets = { |
|
|
"HEALING_CORE": "ANTIGRAVITY_HEALING_CORE.py", |
|
|
"LEARNING_ENGINE": "01_CENTRAL_BRAIN/App/learning_engine.py", |
|
|
"SERVER": "01_CENTRAL_BRAIN/App/server.py" |
|
|
} |
|
|
|
|
|
target_file = valid_targets.get(target_module.upper()) |
|
|
if not target_file: |
|
|
return f"โ ์ ํจํ์ง ์์ ํ๊ฒ์
๋๋ค. ๊ฐ๋ฅ ๋ชฉ๋ก: {', '.join(valid_targets.keys())}" |
|
|
|
|
|
try: |
|
|
project_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) |
|
|
full_path = os.path.join(project_root, target_file) |
|
|
|
|
|
|
|
|
ts = datetime.datetime.now().strftime("%Y%m%d_%H%M%S") |
|
|
backup_path = f"{full_path}.{ts}.bak" |
|
|
shutil.copy2(full_path, backup_path) |
|
|
|
|
|
|
|
|
|
|
|
with open(full_path, "w", encoding="utf-8") as f: |
|
|
f.write(patch_content) |
|
|
|
|
|
log(f"โก [EMERGENCY PATCH] {target_module} ํจ์น ์๋ฃ (Reason: {reason})") |
|
|
return f"โ
{target_module} ๊ธด๊ธ ํจ์น ์๋ฃ. ๋ฐฑ์
: {os.path.basename(backup_path)}" |
|
|
except Exception as e: |
|
|
return f"โ ํจ์น ์คํจ: {str(e)}" |
|
|
|
|
|
def autonomous_asset_miner():
    """Background loop: once per hour, announce a project scan for new
    ideas/algorithms to archive as intelligence assets.

    Runs forever in a daemon thread, so it must never propagate an
    exception.  The sleep comes first so startup stays quiet.
    """
    while True:
        try:
            time.sleep(3600)  # hourly cycle
            log("๐ต๏ธโโ๏ธ [MINER] ์์จ ์์ฐ ์ฑ๊ตด ์ค์บ ์์...")
        except Exception:
            # Was a bare `except:`; keep the loop alive no matter what.
            pass
|
|
|
|
|
|
|
|
|
|
|
def start_api():
    """Launch the FastAPI app with uvicorn on all interfaces.

    The port comes from the PORT environment variable (default 8000);
    cloud hosts such as Render inject PORT automatically.  log_level is
    kept at 'error' so uvicorn does not drown the server's own stderr log.
    """
    port = int(os.getenv("PORT", 8000))
    uvicorn.run(app, host="0.0.0.0", port=port, log_level="error")
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__": |
|
|
|
|
|
log("\n" + "="*50) |
|
|
|
|
|
log("๐ GEMINI MASTER: UNIFIED BRAIN ONLINE") |
|
|
|
|
|
log(" [API] Dashboard Ready at http://localhost:8000") |
|
|
|
|
|
log(" [MCP] Integrated Mode (Cloud/Local Adaptive)") |
|
|
|
|
|
log("="*50) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
is_actually_cloud = os.getenv("IDX_WORKSPACE_ID") or os.getenv("RENDER") or os.getenv("PORT") |
|
|
|
|
|
|
|
|
|
|
|
if is_actually_cloud: |
|
|
log("โ๏ธ ํด๋ผ์ฐ๋ ํ๊ฒฝ ๊ฐ์ง: FastAPI(HTTP) ๋ชจ๋๋ก ์คํํฉ๋๋ค.") |
|
|
|
|
|
|
|
|
miner_thread = threading.Thread(target=autonomous_asset_miner, daemon=True) |
|
|
miner_thread.start() |
|
|
|
|
|
start_api() |
|
|
|
|
|
else: |
|
|
|
|
|
log("๐ ๋ก์ปฌ/IDE ํ๊ฒฝ: Stdio(MCP)์ HTTP(API)๋ฅผ ๋ณ๋ ฌ๋ก ์คํํฉ๋๋ค.") |
|
|
|
|
|
|
|
|
|
|
|
api_thread = threading.Thread(target=start_api, daemon=True) |
|
|
|
|
|
api_thread.start() |
|
|
|
|
|
|
|
|
miner_thread = threading.Thread(target=autonomous_asset_miner, daemon=True) |
|
|
miner_thread.start() |
|
|
log("โ๏ธ [MINER] ์์จ ๊ฐ์น ์ฑ๊ตด ์์ง ๊ฐ๋ (Cycle: 1h)") |
|
|
|
|
|
try: |
|
|
|
|
|
|
|
|
|
|
|
mcp.run() |
|
|
|
|
|
except (KeyboardInterrupt, SystemExit): |
|
|
|
|
|
log("๐ ์์คํ
์ข
๋ฃ ์์ฒญ์ ๋ฐ์์ต๋๋ค.") |
|
|
|
|
|
except Exception as e: |
|
|
|
|
|
log(f"โ ๏ธ MCP ์คํ ์ค๋ฅ: {e}") |
|
|
|
|
|
|
|
|
|
|
|
except Exception: |
|
|
|
|
|
log("\nโ [FAIL]:") |
|
|
|
|
|
traceback.print_exc() |