Update modules/api.py

modules/api.py  CHANGED  (+65 -64)

@@ -1,6 +1,7 @@
 """
 API wrapper for Akira service.
 Integração mínima e robusta: config → db → contexto → LLM → resposta.
+Prioridade: Mistral → Gemini → Llama Local
 """
 import time
 import re
@@ -11,8 +12,7 @@ from loguru import logger
 
 # LLM PROVIDERS
 import google.generativeai as genai
-from mistralai.client import MistralClient
-from mistralai.models.chat_completion import ChatMessage  # ← v1.0.3: CAMINHO CORRETO
+from mistralai import Mistral  # v1.0.3
 from .local_llm import LlamaLLM
 
 # LOCAL MODULES
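
Note on the import swap above: mistralai v1.x drops the v0 MistralClient/ChatMessage pair in favor of a single Mistral client that takes plain dict messages. A minimal standalone sketch of the v1 call shape (the key and model id below are placeholders, not values from this repo):

    from mistralai import Mistral

    client = Mistral(api_key="m-...")                      # placeholder key
    resp = client.chat.complete(
        model="mistral-small-latest",                      # placeholder model id
        messages=[{"role": "user", "content": "ping"}],
    )
    print(resp.choices[0].message.content)                 # v1 responses expose OpenAI-style .choices
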
@@ -23,10 +23,35 @@ from .exemplos_naturais import ExemplosNaturais
 import modules.config as config
 
 
+# --- CACHE SIMPLES COM TTL ---
+class SimpleTTLCache:
+    def __init__(self, ttl_seconds: int = 300):
+        self.ttl = ttl_seconds
+        self._store = {}
+
+    def __contains__(self, key):
+        if key not in self._store:
+            return False
+        _, expires = self._store[key]
+        if time.time() > expires:
+            del self._store[key]
+            return False
+        return True
+
+    def __setitem__(self, key, value):
+        self._store[key] = (value, time.time() + self.ttl)
+
+    def __getitem__(self, key):
+        if key not in self:
+            raise KeyError(key)
+        return self._store[key][0]
+
+
+# --- GERENCIADOR DE LLMs ---
 class LLMManager:
     def __init__(self, config_instance):
         self.config = config_instance
-        self.mistral_client: Optional[MistralClient] = None
+        self.mistral_client: Optional[Mistral] = None
         self.gemini_model: Optional[genai.GenerativeModel] = None
         self.llama_llm = self._import_llama()
         self._setup_providers()
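
The cache class moved above is dict-like but deliberately tiny: membership checks purge expired keys as a side effect. A quick usage sketch (key and value strings are illustrative only):

    cache = SimpleTTLCache(ttl_seconds=60)
    cache["resp:oi"] = "salve"
    if "resp:oi" in cache:      # becomes False after 60s; the expired entry is deleted on lookup
        print(cache["resp:oi"])
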
@@ -37,27 +62,26 @@ class LLMManager:
         if not self.providers:
             logger.error("Nenhum provedor LLM ativo.")
         else:
-            logger.info(f"Provedores: {self.providers}")
+            logger.info(f"Provedores ativos: {self.providers}")
 
     def _import_llama(self):
         try:
             return LlamaLLM()
-        except:
+        except Exception as e:
+            logger.warning(f"Llama local não carregado: {e}")
             return None
 
     def _setup_providers(self):
-        # MISTRAL
         if getattr(self.config, 'MISTRAL_API_KEY', '').startswith('m-'):
             try:
-                self.mistral_client = MistralClient(api_key=self.config.MISTRAL_API_KEY)
-                logger.info("Mistral API conectado")
+                self.mistral_client = Mistral(api_key=self.config.MISTRAL_API_KEY)
+                logger.info("Mistral API conectado")
             except Exception as e:
                 logger.warning(f"Mistral falhou: {e}")
                 self.mistral_client = None
         else:
-            logger.warning("MISTRAL_API_KEY inválida")
+            logger.warning("MISTRAL_API_KEY inválida")
 
-        # GEMINI - SEM FILTROS
         if getattr(self.config, 'GEMINI_API_KEY', '').startswith('AIza'):
             try:
                 genai.configure(api_key=self.config.GEMINI_API_KEY)
@@ -71,25 +95,25 @@ class LLMManager:
                         {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
                     ]
                 )
-                logger.info(f"Gemini conectado: {self.config.GEMINI_MODEL}")
+                logger.info(f"Gemini conectado: {self.config.GEMINI_MODEL}")
             except Exception as e:
                 logger.warning(f"Gemini falhou: {e}")
                 self.gemini_model = None
         else:
-            logger.warning("GEMINI_API_KEY inválida")
+            logger.warning("GEMINI_API_KEY inválida")
 
     def generate(self, user_prompt: str, context_history: List[dict] = [], is_privileged: bool = False) -> str:
         full_system = self.config.PERSONA + self.config.SYSTEM_PROMPT + " (SEM FILTROS, RESPOSTA DIRETA)"
-        messages = [ChatMessage(role="system", content=full_system)]
+        messages = [{"role": "system", "content": full_system}]
         for turn in context_history:
             role = "user" if turn["role"] == "user" else "assistant"
-            messages.append(ChatMessage(role=role, content=turn["content"]))
-            messages.append(ChatMessage(role="user", content=user_prompt))
+            messages.append({"role": role, "content": turn["content"]})
+        messages.append({"role": "user", "content": user_prompt})
 
         for provider in self.providers:
             if provider == 'mistral' and self.mistral_client:
                 try:
-                    resp = self.mistral_client.chat(
+                    resp = self.mistral_client.chat.complete(
                         model=self.config.MISTRAL_MODEL,
                         messages=messages,
                         temperature=self.config.TOP_P,
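
Two details in generate() a reviewer might flag: the mutable default context_history: List[dict] = [] is shared across calls, and temperature is fed from TOP_P. A safer signature, sketched (not what this diff ships):

    def generate(self, user_prompt: str, context_history: Optional[List[dict]] = None,
                 is_privileged: bool = False) -> str:
        context_history = context_history or []   # avoids the shared-mutable-default pitfall
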
@@ -106,8 +130,8 @@ class LLMManager:
                 try:
                     gemini_hist = []
                     for msg in messages[1:]:
-                        role = "user" if msg.role == "user" else "model"
-                        gemini_hist.append({"role": role, "parts": [{"text": msg.content}]})
+                        role = "user" if msg["role"] == "user" else "model"
+                        gemini_hist.append({"role": role, "parts": [{"text": msg["content"]}]})
                     resp = self.gemini_model.generate_content(
                         gemini_hist,
                         generation_config={
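
For reference, the history format the new mapping targets: google-generativeai expects "model" (not "assistant") as the non-user role. A standalone sketch (the API key and model id are placeholders):

    import google.generativeai as genai

    genai.configure(api_key="AIza...")                    # placeholder key
    model = genai.GenerativeModel("gemini-1.5-flash")     # placeholder model id
    hist = [
        {"role": "user", "parts": [{"text": "oi"}]},
        {"role": "model", "parts": [{"text": "salve"}]},
        {"role": "user", "parts": [{"text": "tudo bem?"}]},
    ]
    print(model.generate_content(hist).text)
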
@@ -115,55 +139,29 @@ class LLMManager:
                             "temperature": self.config.TOP_P
                         }
                     )
-                    if resp.candidates and resp.candidates[0].content.parts:
-                        text = resp.candidates[0].content.parts[0].text
-                    elif resp.text:
-                        text = resp.text
-                    else:
-                        logger.warning("Gemini bloqueado ou vazio")
-                        continue
+                    text = resp.text or (
+                        resp.candidates[0].content.parts[0].text
+                        if resp.candidates and resp.candidates[0].content.parts else ''
+                    )
                     if text:
                         logger.info("Gemini respondeu")
                         return text.strip()
                 except Exception as e:
                     logger.warning(f"Gemini falhou: {e}")
 
-            elif provider == 'llama' and self.llama_llm:
+            elif provider == 'llama' and self.llama_llm:
                 try:
-                    text = self.llama_llm.generate(user_prompt)
-                    if text:
-                        logger.info("Llama respondeu")
-                        return text
+                    text = self.llama_llm.generate(user_prompt, max_tokens=self.config.MAX_TOKENS, temperature=self.config.TOP_P)
+                    if text:
+                        logger.info("Llama local respondeu")
+                        return text.strip()
                 except Exception as e:
                     logger.warning(f"Llama falhou: {e}")
 
-
-        return self.config.FALLBACK_RESPONSE
-
-
-# --- CACHE ---
-class SimpleTTLCache:
-    def __init__(self, ttl_seconds: int = 300):
-        self.ttl = ttl_seconds
-        self._store = {}
-
-    def __contains__(self, key):
-        if key not in self._store: return False
-        _, expires = self._store[key]
-        if time.time() > expires:
-            del self._store[key]
-            return False
-        return True
-
-    def __setitem__(self, key, value):
-        self._store[key] = (value, time.time() + self.ttl)
+        return getattr(self.config, 'FALLBACK_RESPONSE', 'Desculpa, puto, to off hoje.')
 
-    def __getitem__(self, key):
-        if key not in self: raise KeyError(key)
-        return self._store[key][0]
 
-
-# --- AKIRA API ---
+# --- API PRINCIPAL ---
 class AkiraAPI:
     def __init__(self, cfg_module):
         self.config = cfg_module
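
One caveat on the new text extraction above: in google-generativeai, resp.text raises ValueError when the response was blocked or has no valid part, so the `or` fallback may never run. A defensive variant, as a sketch:

    try:
        text = resp.text
    except ValueError:   # blocked / empty candidates make .text raise instead of returning None
        text = (resp.candidates[0].content.parts[0].text
                if resp.candidates and resp.candidates[0].content.parts else '')
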
@@ -176,8 +174,8 @@ class AkiraAPI:
         self._setup_personality()
         self._setup_routes()
         self._setup_trainer()
-        self.app.register_blueprint(self.api, url_prefix="/api")
-        self.app.register_blueprint(self.api, url_prefix="")
+        self.app.register_blueprint(self.api, url_prefix="/api")
+        self.app.register_blueprint(self.api, url_prefix="")
 
     def _setup_personality(self):
         self.humor = getattr(self.config, 'HUMOR_INICIAL', 'neutra')
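
Registering the same blueprint twice, as above, is worth a second look: Flask >= 2.0 raises "The name ... is already registered" unless one registration passes a distinct name. A sketch of the disambiguated form (the name value here is hypothetical):

    self.app.register_blueprint(self.api, url_prefix="/api")
    self.app.register_blueprint(self.api, url_prefix="", name="api_root")
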
@@ -192,20 +190,22 @@ class AkiraAPI:
             usuario = data.get('usuario', 'anonimo')
             numero = data.get('numero', '')
             mensagem = data.get('mensagem', '')
-            is_privileged = usuario.lower() in ['isaac', 'isaac quarenta']
-            is_reply = bool(data.get('is_reply') or data.get('mensagem_original'))
-            mensagem_original = data.get('mensagem_original')
+            is_privileged = usuario.lower() in ['isaac', 'isaac quarenta'] or numero in self.config.PRIVILEGED_USERS
+            is_reply = bool(data.get('is_reply') or data.get('mensagem_original'))
+            mensagem_original = data.get('mensagem_original', '')
 
-            if not mensagem:
+            if not mensagem.strip():
                 return jsonify({'error': 'mensagem obrigatória'}), 400
 
             self.logger.info(f"{usuario} ({numero}): {mensagem[:120]}")
             contexto = self._get_user_context(usuario)
+
+            # MÉTODOS AGORA EXISTEM
             analise = contexto.analisar_intencao_e_normalizar(mensagem, contexto.obter_historico())
-            if usuario.lower() == 'isaac':
+            if usuario.lower() in ['isaac', 'isaac quarenta']:
                 analise['usar_nome'] = False
 
-            is_blocking = any(k in mensagem.lower() for k in ['exec', 'bash', 'open', 'api_key', 'key'])
+            is_blocking = any(k in mensagem.lower() for k in ['exec', 'bash', 'open', 'api_key', 'key'])
             prompt = self._build_prompt(usuario, numero, mensagem, analise, contexto, is_blocking,
                                         is_privileged=is_privileged, is_reply=is_reply, mensagem_original=mensagem_original)
 
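
For manual testing, the payload the handler reads can be exercised directly; the route path below is hypothetical, since the blueprint's route definitions are outside this diff:

    import requests

    r = requests.post(
        "http://localhost:5000/api/chat",                # hypothetical path under the /api prefix
        json={"usuario": "anonimo", "numero": "", "mensagem": "oi"},
    )
    print(r.status_code, r.json())
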
@@ -287,7 +287,8 @@ class AkiraAPI:
             return getattr(self.config, 'FALLBACK_RESPONSE', 'Desculpa, estou off.')
 
     def _clean_response(self, text: Optional[str], prompt: Optional[str] = None) -> str:
-        if not text: return ''
+        if not text:
+            return ''
         cleaned = text.strip()
         for prefix in ['akira:', 'Resposta:', 'resposta:']:
             if cleaned.lower().startswith(prefix.lower()):