import os
import re
import requests
# MANTENHA ESTES IMPORTS (essenciais para ler o config.py)
from config import DOCS, MODEL_PRIORITY_A, MODEL_PRIORITY_B
def detect_language(passage):
    """Classify *passage* as 'greek' or 'latin' script.

    The passage counts as Greek when more than 10% of its characters
    fall in the Greek / Greek Extended Unicode blocks; everything else
    (including an empty string) is reported as Latin.
    """
    threshold = len(passage) * 0.1
    greek_count = sum(
        1 for ch in passage if re.match(r'[\u0370-\u03FF\u1F00-\u1FFF]', ch)
    )
    return 'greek' if greek_count > threshold else 'latin'
def call_openrouter(passage, mode, category):
    """Run a batched philological analysis of *passage* via OpenRouter.

    Steps:
      1. Fetch the question list from the Google Doc mapped to *category*
         in DOCS (any line ending in '?').
      2. Pick the first responsive model from the priority chain —
         MODEL_PRIORITY_A when *mode* contains "Alta", else MODEL_PRIORITY_B.
      3. Answer the questions in batches of 5, accumulating a report.

    Returns a ``(report_text, model_name)`` tuple. On failure the second
    element is ``"Nenhum"`` (doc fetch failed) or ``"Falha"`` (no model
    in the chain answered the probe request).
    """
    api_key = os.getenv("OPENROUTER_API_KEY")
    lang = detect_language(passage)
    url_doc = DOCS.get(category)

    # Pull the question list; a missing category yields url_doc=None,
    # which requests rejects and the handler below reports cleanly.
    try:
        resp = requests.get(url_doc, timeout=15)
        resp.raise_for_status()
        questions = [l.strip() for l in resp.text.splitlines() if l.strip().endswith('?')]
    except Exception as e:
        return f"Erro ao acessar Google Docs: {str(e)}", "Nenhum"

    model_chain = MODEL_PRIORITY_A if "Alta" in mode else MODEL_PRIORITY_B
    full_report = [f"--- ANÁLISE FILOLÓGICA: {category.upper()} ---", f"Texto: {passage}\n"]
    batch_size = 5

    # --- Model selection with fallback ---
    working_model = None
    for model_candidate in model_chain:
        try:
            # Cheap 1-token probe to check whether the model answers at all.
            test_resp = requests.post(
                url="https://openrouter.ai/api/v1/chat/completions",
                headers={"Authorization": f"Bearer {api_key}"},
                json={"model": model_candidate, "messages": [{"role": "user", "content": "test"}], "max_tokens": 1},
                timeout=10
            )
            if test_resp.status_code == 200:
                working_model = model_candidate
                break
        except requests.RequestException:
            # BUG FIX: was a bare `except:` that also swallowed
            # KeyboardInterrupt/SystemExit; only network/HTTP errors
            # should advance us to the next candidate.
            continue
    if not working_model:
        return "Erro: Nenhum modelo da lista está disponível no momento (OpenRouter Offline?).", "Falha"

    # --- Run the batches with the model that responded ---
    for i in range(0, len(questions), batch_size):
        batch = questions[i:i + batch_size]
        prompt = f"""Atue como um Filólogo especialista em {lang}.
Passagem: "{passage}"
Responda detalhadamente em PORTUGUÊS.
OBRIGATÓRIO: Escreva a PERGUNTA completa antes de cada resposta.
QUESTÕES:
{chr(10).join(batch)}"""
        try:
            response = requests.post(
                url="https://openrouter.ai/api/v1/chat/completions",
                headers={"Authorization": f"Bearer {api_key}"},
                json={
                    "model": working_model,
                    "messages": [{"role": "user", "content": prompt}],
                    "temperature": 0.1,  # low temperature: deterministic, factual answers
                    "max_tokens": 4000
                },
                timeout=120
            )
            if response.status_code == 200:
                full_report.append(response.json()['choices'][0]['message']['content'])
            else:
                full_report.append(f"\n[Erro no lote {i//batch_size + 1}: Status {response.status_code}]")
        except Exception as e:
            # Best-effort: a failed batch is recorded in the report
            # instead of aborting the remaining batches.
            full_report.append(f"\n[Falha de conexão: {str(e)}]")

    return "\n\n".join(full_report), working_model