AniseF committed on
Commit
8ee20aa
·
verified ·
1 Parent(s): 50bda4c

Update utils.py

Browse files
Files changed (1) hide show
  1. utils.py +26 -42
utils.py CHANGED
@@ -1,9 +1,7 @@
1
  import os
2
  import re
3
  import requests
4
- import time
5
 
6
- # URLs dos documentos de perguntas
7
  URLS = {
8
  "Syntax": "https://raw.githubusercontent.com/u-cl-lab/classical-query/main/syntax.txt",
9
  "Morphology": "https://raw.githubusercontent.com/u-cl-lab/classical-query/main/morphology.txt",
@@ -11,57 +9,43 @@ URLS = {
11
  }
12
 
13
  def detect_language(passage):
14
- """Detecta se o texto é Grego ou Latim (Baseado na lógica do Thomas)"""
15
  greek_chars = re.findall(r'[\u0370-\u03FF\u1F00-\u1FFF]', passage)
16
- if greek_chars and len(greek_chars) > len(passage) * 0.2:
17
- return 'greek'
18
- return 'latin'
19
-
20
- def get_questions(category):
21
- """Busca as perguntas no documento remoto"""
22
- try:
23
- response = requests.get(URLS.get(category))
24
- return [l.strip() for l in response.text.splitlines() if l.strip().endswith('?')]
25
- except:
26
- return []
27
 
28
  def call_openrouter(passage, mode, category):
29
- """Processa cada pergunta individualmente para evitar cortes"""
30
  api_key = os.getenv("OPENROUTER_API_KEY")
31
  lang = detect_language(passage)
32
- questions = get_questions(category)
33
 
34
- if not questions:
 
 
 
 
 
 
 
 
 
35
  return "Erro ao carregar perguntas.", "Nenhum"
36
 
37
- model_chain = ["anthropic/claude-3.5-sonnet", "openai/gpt-4o"] if mode == "Alta Precisão (Filológica)" else ["google/gemini-flash-1.5", "mistralai/mistral-7b-instruct"]
38
 
39
- full_report = []
40
- model_used_final = "Nenhum"
41
-
42
  for q in questions:
 
 
 
43
  for model in model_chain:
44
  try:
45
- # Prompt instruindo idioma e contexto específico
46
- prompt = f"Analise este texto em {lang.upper()}: '{passage}'. Responda em PORTUGUÊS à pergunta: {q}"
47
-
48
- response = requests.post(
49
- url="https://openrouter.ai/api/v1/chat/completions",
50
  headers={"Authorization": f"Bearer {api_key}"},
51
- json={
52
- "model": model,
53
- "messages": [{"role": "user", "content": prompt}],
54
- "temperature": 0.1,
55
- "max_tokens": 800
56
- },
57
- timeout=40
58
  )
59
- if response.status_code == 200:
60
- answer = response.json()["choices"][0]["message"]["content"]
61
- full_report.append(f"Q: {q}\nA: {answer}\n")
62
- model_used_final = model
63
- break # Passa para a próxima pergunta
64
- except:
65
- continue
66
-
67
- return "\n".join(full_report), model_used_final
 
1
  import os
2
  import re
3
  import requests
 
4
 
 
5
  URLS = {
6
  "Syntax": "https://raw.githubusercontent.com/u-cl-lab/classical-query/main/syntax.txt",
7
  "Morphology": "https://raw.githubusercontent.com/u-cl-lab/classical-query/main/morphology.txt",
 
9
  }
10
 
11
def detect_language(passage):
    """Classify *passage* as ``'greek'`` or ``'latin'``.

    Counts characters falling in the Greek and Greek Extended Unicode
    blocks; if they exceed 10% of the passage length the text is taken
    to be Greek, otherwise Latin (the default for empty input).
    """
    # Codepoints U+0370–U+03FF (Greek) and U+1F00–U+1FFF (Greek Extended).
    hits = len(re.findall(r'[\u0370-\u03FF\u1F00-\u1FFF]', passage))
    if hits > len(passage) * 0.1:
        return 'greek'
    return 'latin'
 
 
 
 
 
 
 
 
 
 
14
 
15
def call_openrouter(passage, mode, category):
    """Answer every question of *category* about a classical-language passage.

    Downloads the question list for ``category`` from the remote documents
    in ``URLS``, then asks each question via the OpenRouter chat API,
    falling back through a chain of models when a request fails.

    Args:
        passage: The Greek or Latin text to analyse.
        mode: UI mode string; when it contains ``"Alta"`` the high-precision
            model chain is used, otherwise the cheap single-model chain.
        category: Key into ``URLS`` selecting the question document.

    Returns:
        A ``(report, model)`` tuple: the joined Q&A report (in Portuguese)
        and the name of the last model that actually produced an answer,
        or ``("Erro ao carregar perguntas.", "Nenhum")`` when the question
        list cannot be loaded or is empty.
    """
    api_key = os.getenv("OPENROUTER_API_KEY")
    lang = detect_language(passage)

    # Language-specific system instructions (translation/specialty protocol).
    lang_instr = {
        'latin': "Você é um latinista experiente. Use apenas regras de gramática latina. Não confunda com grego.",
        'greek': "Você é um helenista experiente. Use apenas regras de grego antigo (incluindo o Dual e o Optativo)."
    }

    # Narrow exceptions: KeyError for an unknown category, RequestException
    # for network/HTTP failures. The previous bare `except:` also swallowed
    # programming errors such as typos inside the try body.
    try:
        resp_q = requests.get(URLS[category], timeout=30)
        resp_q.raise_for_status()
        questions = [l.strip() for l in resp_q.text.splitlines() if l.strip().endswith('?')]
    except (KeyError, requests.RequestException):
        return "Erro ao carregar perguntas.", "Nenhum"

    if not questions:
        # An empty question document is as unusable as an unreachable one;
        # restore the explicit error path instead of returning an empty report.
        return "Erro ao carregar perguntas.", "Nenhum"

    model_chain = ["anthropic/claude-3.5-sonnet", "openai/gpt-4o"] if "Alta" in mode else ["google/gemini-flash-1.5"]

    report = []
    # BUG FIX: the function previously always reported model_chain[0] as the
    # model used, even when a fallback model answered or nothing answered.
    model_used = "Nenhum"
    for q in questions:
        # One request per question to avoid truncated multi-question answers.
        prompt = f"{lang_instr[lang]}\nAnalise o texto: {passage}\nResponda em PORTUGUÊS: {q}"

        for model in model_chain:
            try:
                r = requests.post(
                    "https://openrouter.ai/api/v1/chat/completions",
                    headers={"Authorization": f"Bearer {api_key}"},
                    json={"model": model, "messages": [{"role": "user", "content": prompt}], "temperature": 0.1},
                    timeout=30
                )
                if r.status_code == 200:
                    report.append(f"PERGUNTA: {q}\nRESPOSTA: {r.json()['choices'][0]['message']['content']}\n")
                    model_used = model  # remember which model actually answered
                    break  # move on to the next question
            except requests.RequestException:
                continue  # try the next model in the chain

    return "\n".join(report), model_used