# ai_controller.py — AI chat controller backed by a local Ollama server.
import os
import requests
from config import SYSTEM_PROMPT, DEFAULT_MODEL
# Ollama connection settings (overridable via environment variables).
OLLAMA_HOST = os.getenv("OLLAMA_HOST", "127.0.0.1")
OLLAMA_PORT = int(os.getenv("OLLAMA_PORT", "11434"))
OLLAMA_URL = f"http://{OLLAMA_HOST}:{OLLAMA_PORT}/api/chat"
class AIController:
    """Chat controller backed by a local Ollama server.

    Maintains a rolling conversation memory (recent user/assistant turns)
    that is replayed after the system prompt on every request, and falls
    back to ``external_search`` when the model call fails or returns
    nothing usable.
    """

    # Maximum number of retained memory entries (user + assistant messages).
    # Was a hard-coded 20 inside self_improve; hoisted so subclasses or
    # callers can tune the window.
    MAX_MEMORY = 20

    def __init__(self):
        # Rolling list of {"role": ..., "content": ...} message dicts.
        self.memory = []
        self.system_prompt = SYSTEM_PROMPT

    def generate_response(self, user_input, language="ar", model=DEFAULT_MODEL):
        """Send ``user_input`` to the Ollama chat API and return the reply.

        Args:
            user_input: The user's message text.
            language: Accepted for interface compatibility; currently unused
                by this method (the model decides the reply language).
            model: Name of the Ollama model to query.

        Returns:
            The assistant's reply text, or the ``external_search`` fallback
            message when the request fails or the model returns an empty
            answer.
        """
        # Build the message list: system prompt, prior turns, new user input.
        messages = [{"role": "system", "content": self.system_prompt}]
        messages += self.memory
        messages.append({"role": "user", "content": user_input})
        try:
            response = requests.post(
                OLLAMA_URL,
                json={
                    "model": model,
                    "messages": messages,
                    "stream": False,
                },
                timeout=120,
            )
            response.raise_for_status()
            data = response.json()
        except (requests.RequestException, ValueError):
            # Narrowed from a bare `except Exception` so genuine programming
            # errors are not silently swallowed: RequestException covers
            # connection/timeout/HTTP failures, ValueError covers a
            # non-JSON response body.
            return self.external_search(user_input)

        answer = data.get("message", {}).get("content", "").strip()
        if not answer:
            # An empty model reply is as useless as a failed call — use the
            # same fallback instead of returning "".
            return self.external_search(user_input)

        # Record the exchange, then trim memory to the configured window.
        self.memory.append({"role": "user", "content": user_input})
        self.memory.append({"role": "assistant", "content": answer})
        self.self_improve(user_input, answer)
        return answer

    def self_improve(self, user_input, answer):
        """Trim conversation memory to the most recent MAX_MEMORY entries."""
        if len(self.memory) > self.MAX_MEMORY:
            self.memory = self.memory[-self.MAX_MEMORY:]

    def external_search(self, query):
        """Fallback reply used when no model answer is available."""
        return "عذراً، لم أجد جواباً دقيقاً على سؤالك حالياً."