|
|
""" |
|
|
CRANE AI - Ana Sistem (yeniden oluşturuldu) |
|
|
""" |
|
|
|
|
|
import asyncio |
|
|
import logging |
|
|
import socket |
|
|
from typing import Dict, Any, List |
|
|
from datetime import datetime |
|
|
from contextlib import asynccontextmanager |
|
|
|
|
|
from fastapi import FastAPI, HTTPException |
|
|
from fastapi.middleware.cors import CORSMiddleware |
|
|
from pydantic import BaseModel |
|
|
import uvicorn |
|
|
import gradio as gr |
|
|
|
|
|
from config.settings import ( |
|
|
MODELS, |
|
|
ROUTER_CONFIG, |
|
|
SYSTEM_LIMITS, |
|
|
MEMORY_CONFIG, |
|
|
DEVICE, |
|
|
HF_TOKEN, |
|
|
API_CONFIG, |
|
|
) |
|
|
from modules.code_module import CodeModule |
|
|
from modules.chat_module import ChatModule |
|
|
from modules.reason_module import ReasonModule |
|
|
from modules.fast_module import FastModule |
|
|
from router.intelligent_router import IntelligentRouter |
|
|
from core.token_capsule import TokenCapsuleLayer |
|
|
from memory.local_memory import LocalMemoryManager |
|
|
|
|
|
|
|
|
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s") |
|
|
logger = logging.getLogger(__name__) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def find_free_port(start_port: int = 7860, max_port: int = 7870) -> int:
    """Return the first port in [start_port, max_port] with no listener on localhost.

    A port is considered free when a TCP connect attempt to it fails
    (``connect_ex`` returns a non-zero error code).

    Raises:
        RuntimeError: if every port in the range is already in use.
    """
    candidate = start_port
    while candidate <= max_port:
        probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            in_use = probe.connect_ex(("localhost", candidate)) == 0
        finally:
            probe.close()
        if not in_use:
            return candidate
        candidate += 1
    raise RuntimeError("Boş port bulunamadı")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class QueryRequest(BaseModel):
    """Request body accepted by the POST /query endpoint."""

    query: str  # the user's natural-language query text
    context: Dict[str, Any] = {}  # optional extra context passed to the router (pydantic copies this default per instance)
    user_id: str = "default"  # key used to store/retrieve per-user history
|
|
|
|
|
class QueryResponse(BaseModel):
    """Response body returned by the POST /query endpoint."""

    response: str  # the generated answer text
    module_used: str  # name of the module that produced the answer
    confidence: float  # confidence score reported by the router
    execution_time: float  # wall-clock processing time in seconds
    routing_info: Dict[str, Any] = {}  # extra routing diagnostics, if any
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class CRANEAISystem: |
|
|
"""CRANE AI ana orkestratör sınıfı""" |
|
|
|
|
|
def __init__(self): |
|
|
self.modules: Dict[str, Any] = {} |
|
|
self.router: IntelligentRouter | None = None |
|
|
self.token_layer: TokenCapsuleLayer | None = None |
|
|
self.memory_manager: LocalMemoryManager | None = None |
|
|
self.is_initialized: bool = False |
|
|
|
|
|
self.stats = { |
|
|
"total_queries": 0, |
|
|
"successful_queries": 0, |
|
|
"failed_queries": 0, |
|
|
"avg_response_time": 0.0, |
|
|
"uptime_start": datetime.now(), |
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
async def initialize(self): |
|
|
"""Sistemdeki tüm alt bileşenleri başlatır""" |
|
|
logger.info("🚀 CRANE AI sistemi başlatılıyor...") |
|
|
await self._load_modules() |
|
|
await self._initialize_router() |
|
|
await self._initialize_token_layer() |
|
|
await self._initialize_memory() |
|
|
self.is_initialized = True |
|
|
logger.info("✅ CRANE AI sistemi hazır!") |
|
|
|
|
|
async def shutdown(self): |
|
|
"""Sistemi kapatır ve kaynakları temizler""" |
|
|
logger.info("🔄 Sistem kapatılıyor...") |
|
|
if self.memory_manager: |
|
|
await self.memory_manager.stop() |
|
|
for module in self.modules.values(): |
|
|
if hasattr(module, "unload_model"): |
|
|
module.unload_model() |
|
|
logger.info("👋 Sistem kapatıldı") |
|
|
|
|
|
|
|
|
|
|
|
async def _load_modules(self): |
|
|
logger.info("📦 MicroModule'lar yükleniyor...") |
|
|
for name, cfg in MODELS.items(): |
|
|
try: |
|
|
config = {**cfg, "device": DEVICE, "hf_token": HF_TOKEN} |
|
|
if name == "code_module": |
|
|
module = CodeModule(config) |
|
|
elif name == "chat_module": |
|
|
module = ChatModule(config) |
|
|
elif name == "reason_module": |
|
|
module = ReasonModule(config) |
|
|
elif name == "fast_module": |
|
|
module = FastModule(config) |
|
|
else: |
|
|
continue |
|
|
self.modules[name] = module |
|
|
logger.info(f" ✅ {name} yüklendi") |
|
|
except Exception as e: |
|
|
logger.error(f" ❌ {name} yükleme hatası: {e}") |
|
|
logger.info(f"📦 {len(self.modules)} modül yüklendi") |
|
|
|
|
|
async def _initialize_router(self): |
|
|
logger.info("🎯 Router başlatılıyor...") |
|
|
self.router = IntelligentRouter(self.modules, ROUTER_CONFIG) |
|
|
logger.info("🎯 Router başlatıldı") |
|
|
|
|
|
async def _initialize_token_layer(self): |
|
|
logger.info("🔤 Token Capsule Layer başlatılıyor...") |
|
|
self.token_layer = TokenCapsuleLayer({ |
|
|
"max_length": SYSTEM_LIMITS["max_input_length"], |
|
|
"device": DEVICE, |
|
|
"hf_token": HF_TOKEN, |
|
|
"cache_size": 500, |
|
|
}) |
|
|
logger.info("🔤 Token Capsule Layer başlatıldı") |
|
|
|
|
|
async def _initialize_memory(self): |
|
|
logger.info("🧠 Memory Manager başlatılıyor...") |
|
|
self.memory_manager = LocalMemoryManager(MEMORY_CONFIG) |
|
|
await self.memory_manager.start() |
|
|
logger.info("🧠 Memory Manager başlatıldı") |
|
|
|
|
|
|
|
|
|
|
|
async def process_query(self, query: str, context: Dict[str, Any] | None = None, user_id: str = "default") -> Dict[str, Any]: |
|
|
start = datetime.now() |
|
|
context = context or {} |
|
|
try: |
|
|
if not self.is_initialized: |
|
|
raise RuntimeError("Sistem henüz başlatılmadı") |
|
|
|
|
|
|
|
|
user_history = await self.memory_manager.retrieve(f"history_{user_id}") if self.memory_manager else None |
|
|
if user_history: |
|
|
context["history"] = user_history |
|
|
|
|
|
result = await self.router.route_query(query, context) if self.router else {"error": "Router yok"} |
|
|
|
|
|
|
|
|
if self.memory_manager: |
|
|
updated_history: List[str] = user_history or [] |
|
|
updated_history.append(query) |
|
|
if len(updated_history) > 10: |
|
|
updated_history = updated_history[-10:] |
|
|
await self.memory_manager.store(f"history_{user_id}", updated_history, ttl=86_400) |
|
|
|
|
|
exec_time = (datetime.now() - start).total_seconds() |
|
|
self._update_stats(exec_time, "error" not in result) |
|
|
|
|
|
if "error" in result: |
|
|
raise RuntimeError(result["error"]) |
|
|
|
|
|
return { |
|
|
"response": result.get("response", "Yanıt alınamadı"), |
|
|
"module_used": result.get("module", "unknown"), |
|
|
"confidence": result.get("confidence", 0.0), |
|
|
"execution_time": exec_time, |
|
|
"routing_info": result.get("routing_info", {}), |
|
|
} |
|
|
except Exception as e: |
|
|
exec_time = (datetime.now() - start).total_seconds() |
|
|
self._update_stats(exec_time, False) |
|
|
logger.error(f"Sorgu hatası: {e}") |
|
|
return {"error": str(e), "execution_time": exec_time} |
|
|
|
|
|
|
|
|
|
|
|
def _update_stats(self, exec_time: float, success: bool): |
|
|
self.stats["total_queries"] += 1 |
|
|
if success: |
|
|
self.stats["successful_queries"] += 1 |
|
|
else: |
|
|
self.stats["failed_queries"] += 1 |
|
|
total_time = self.stats["avg_response_time"] * (self.stats["total_queries"] - 1) |
|
|
self.stats["avg_response_time"] = (total_time + exec_time) / self.stats["total_queries"] |
|
|
|
|
|
def get_system_stats(self) -> Dict[str, Any]: |
|
|
uptime = datetime.now() - self.stats["uptime_start"] |
|
|
return { |
|
|
"system_info": { |
|
|
"uptime": str(uptime), |
|
|
"is_initialized": self.is_initialized, |
|
|
"device": DEVICE, |
|
|
}, |
|
|
"query_stats": self.stats, |
|
|
"router": self.router.get_stats() if self.router else {}, |
|
|
"token": self.token_layer.get_stats() if self.token_layer else {}, |
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Single shared system instance used by the FastAPI endpoints and the Gradio UI.
crane_system = CRANEAISystem()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: boot the CRANE system before serving requests,
    shut it down when the server exits."""
    await crane_system.initialize()
    yield
    await crane_system.shutdown()
|
|
|
|
|
# FastAPI application; `lifespan` starts and stops the CRANE system.
app = FastAPI(title="CRANE AI API", version="1.0.0", lifespan=lifespan)

# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is a
# very permissive CORS policy — consider restricting origins in production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
|
|
|
|
|
|
|
|
|
|
@app.post("/query", response_model=QueryResponse)
async def query_endpoint(req: QueryRequest):
    """Route a query through the CRANE system and return the structured result."""
    outcome = await crane_system.process_query(req.query, req.context, req.user_id)
    if "error" not in outcome:
        return QueryResponse(**outcome)
    raise HTTPException(status_code=500, detail=outcome["error"])
|
|
|
|
|
@app.get("/stats")
async def stats_endpoint():
    """Expose the system's aggregate statistics snapshot."""
    return crane_system.get_system_stats()
|
|
|
|
|
@app.get("/health")
async def health():
    """Lightweight liveness/readiness probe."""
    if crane_system.is_initialized:
        status = "initialized"
    else:
        status = "initializing"
    return {"status": status}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def gradio_interface(message: str, history: List[Dict[str, str]] | None = None):
    """Bridge the Gradio chat UI to the CRANE system.

    Sends the message (plus chat history as context) through process_query
    and renders either the formatted answer or an error string.
    """
    outcome = await crane_system.process_query(message, {"history": history or []})
    if "error" not in outcome:
        return (
            f"**Yanıt:**\n{outcome['response']}\n\n"
            f"**Sistem Bilgileri:**\n"
            f"- Kullanılan Modül: {outcome['module_used']}\n"
            f"- Güven Skoru: {outcome['confidence']:.2f}\n"
            f"- Süre: {outcome['execution_time']:.2f}s"
        )
    return f"❌ Hata: {outcome['error']}"
|
|
|
|
|
# Gradio chat UI wired to gradio_interface; launched from main() alongside
# the FastAPI server.
demo = gr.ChatInterface(
    fn=gradio_interface,
    type="messages",
    title="🏗️ CRANE AI - Hibrit Yapay Zeka Sistemi",
    description="Compressed Routing and Neural Embedding ile güçlendirilmiş AI sistemi",
    theme=gr.themes.Soft(),
    examples=[
        "Merhaba! Nasılsın?",
        "Python'da bir hesap makinesi yaz",
        "Yapay zeka nedir?",
        "Bir web sitesi tasarla",
        "Bugün hava nasıl?",
    ],
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def main():
    """Start the Gradio UI and the FastAPI (uvicorn) server side by side."""
    gradio_port = find_free_port(7860, 7870)
    logger.info(f"🌐 Gradio port: {gradio_port}")

    # BUGFIX: pass prevent_thread_lock=True. With the default (False) Gradio's
    # launch() blocks the calling thread indefinitely, so the uvicorn server
    # below would never start.
    demo.launch(
        server_name="0.0.0.0",
        server_port=gradio_port,
        share=False,
        inbrowser=False,
        prevent_thread_lock=True,
    )

    # NOTE(review): uvicorn ignores `workers` when run programmatically via
    # Server.serve(); kept for config compatibility.
    config = uvicorn.Config(app=app, host=API_CONFIG["host"], port=API_CONFIG["port"], workers=API_CONFIG["workers"])
    server = uvicorn.Server(config)
    await server.serve()
|
|
|
|
|
# Script entry point: run the combined Gradio + FastAPI application.
if __name__ == "__main__":
    asyncio.run(main())