akra35567 committed · Commit 8e6661d (verified) · Parent: 88a8f8f

Upload 22 files

modules/api.py CHANGED
```diff
@@ -340,7 +340,10 @@ class LLMManager:
                 else:
                     text = str(response)
             except Exception as api_error:
-                logger.warning(f"Gemini nova API erro: {api_error}")
+                if "400" in str(api_error) or "API_KEY_INVALID" in str(api_error):
+                    logger.error(f"Gemini: API KEY inválida ou erro de argumento (400).")
+                else:
+                    logger.warning(f"Gemini nova API erro: {api_error}")
                 return None
         elif self.gemini_model:
             response = self.gemini_model.generate_content(full_prompt)
@@ -374,7 +377,10 @@ class LLMManager:
             if text:
                 return text.strip()
         except Exception as e:
-            logger.warning(f"Groq erro: {e}")
+            if "401" in str(e) or "Unauthorized" in str(e):
+                logger.error(f"Groq: Erro de Autenticação (401). Verifique a API KEY.")
+            else:
+                logger.warning(f"Groq erro: {e}")
             return None
 
     def _call_grok(self, system_prompt: str, context_history: List[dict], user_prompt: str) -> Optional[str]:
```
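
Both new branches pick the log level by substring-matching the exception text. A minimal, self-contained sketch of that pattern (the helper name, logger setup, and marker tuples below are illustrative, not code from this repo):

```python
import logging

logger = logging.getLogger("llm")

def log_provider_error(provider: str, exc: Exception, auth_markers: tuple) -> None:
    """Log credential/argument failures as errors, anything else as a warning."""
    message = str(exc)
    if any(marker in message for marker in auth_markers):
        logger.error(f"{provider}: invalid API key or bad request ({message})")
    else:
        logger.warning(f"{provider} error: {exc}")

# Roughly mirrors the hunks above:
#   log_provider_error("Gemini", api_error, ("400", "API_KEY_INVALID"))
#   log_provider_error("Groq", e, ("401", "Unauthorized"))
```
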
modules/persona_tracker.py CHANGED
```diff
@@ -86,8 +86,8 @@ Retorne APENAS um JSON válido estruturado assim (e NADA de texto fora das chave
         """
 
         # Chama o LLM (garante formato json)
-        # O MultiLLMClient / AkiraAPI tem _generate_response(prompt, context_history)
-        response_json_str = self.llm_client._generate_response(prompt, [])
+        # O AkiraAPI tem o método .generate(prompt, context_history)
+        response_json_str = self.llm_client.generate(prompt, [])
 
         if not response_json_str:
             return
```
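
Per the updated comment, AkiraAPI exposes a public generate(prompt, context_history) method, and the hunk swaps the old private _generate_response call for it. A hedged sketch of how the surrounding persona-extraction step presumably consumes the returned JSON string (the wrapper function and error handling here are assumptions, not code from this repo):

```python
import json
from typing import Optional

def extract_persona_json(llm_client, prompt: str) -> Optional[dict]:
    """Call the public generate() API with an empty context history and parse the reply."""
    response_json_str = llm_client.generate(prompt, [])  # call shape shown in the hunk above
    if not response_json_str:
        return None
    try:
        return json.loads(response_json_str)
    except json.JSONDecodeError:
        # The prompt asks for JSON only; bail out if the model returned extra text.
        return None
```
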
modules/short_term_memory.py CHANGED
```diff
@@ -436,6 +436,10 @@ class ShortTermMemory:
     def get_messages(self, limit: int = 10) -> List[MessageWithContext]:
         """Alias para get_last_n_messages (compatibilidade PersonaTracker)."""
         return self.get_last_n_messages(limit)
+
+    def get_context(self, **kwargs) -> List[MessageWithContext]:
+        """Alias para get_context_window."""
+        return self.get_context_window(**kwargs)
 
     def get_last_n_messages(self, n: int) -> List[MessageWithContext]:
         """
```
modules/unified_context.py CHANGED
```diff
@@ -554,7 +554,7 @@ class ShortTermMemoryManager:
         if conversation_id not in self._instances:
             return []
         stm = self._instances[conversation_id]
-        result = stm.get_context(
+        result = stm.get_context_window(
             include_replies=include_replies,
             prioritize_replies=True,
             max_messages=limit
```