habulaj committed on
Commit
c84647a
·
verified ·
1 Parent(s): 768f6d9

Update routers/cronjob.py

Browse files
Files changed (1) hide show
  1. routers/cronjob.py +16 -15
routers/cronjob.py CHANGED
@@ -23,11 +23,11 @@ async def fetch_news():
23
  response = await client.get("/news")
24
  if response.status_code == 200:
25
  data = response.json()
26
- print("News fetched:", data)
27
  else:
28
- print("Erro ao buscar notícias:", response.status_code)
29
  except Exception as e:
30
- print("Erro fetch_news:", e)
31
 
32
  await asyncio.sleep(180) # 3 minutos
33
 
@@ -35,7 +35,7 @@ async def fetch_filter():
35
  async with httpx.AsyncClient(timeout=10.0, base_url=BASE_URL) as client:
36
  while not stop_flags.get("filter"):
37
  try:
38
- response = await client.get("/filter")
39
  if response.status_code == 200:
40
  data = response.json()
41
  f = data.get("filter", {})
@@ -43,15 +43,16 @@ async def fetch_filter():
43
  relevance = f.get("relevance", "low").lower()
44
  brazil_interest = f.get("brazil_interest", False)
45
 
46
- print("Filter fetched:", data)
47
 
 
48
  if not is_news_content or relevance not in ["medium", "high", "viral"] or not brazil_interest:
49
- print("Condição não satisfeita, refazendo chamada imediatamente...")
50
- continue # Repetir sem esperar
51
  else:
52
- print("Erro ao buscar filter:", response.status_code)
53
  except Exception as e:
54
- print("Erro fetch_filter:", e)
55
 
56
  await asyncio.sleep(120) # 2 minutos
57
 
@@ -59,21 +60,22 @@ async def fetch_analyze():
59
  async with httpx.AsyncClient(timeout=10.0, base_url=BASE_URL) as client:
60
  while not stop_flags.get("analyze"):
61
  try:
62
- response = await client.get("/analyze")
63
  if response.status_code == 200:
64
  data = response.json()
65
  success = data.get("rewrite_result", {}).get("success", False)
66
- print("Analyze fetched:", data)
 
67
  if not success:
68
- print("Analyze não teve sucesso, tentando novamente em 1 minuto...")
69
  await asyncio.sleep(60)
70
  continue
71
  else:
72
- print("Erro ao buscar analyze:", response.status_code)
73
  await asyncio.sleep(60)
74
  continue
75
  except Exception as e:
76
- print("Erro fetch_analyze:", e)
77
  await asyncio.sleep(60)
78
  continue
79
 
@@ -92,7 +94,6 @@ async def start_cronjob():
92
 
93
  stop_flags = {"news": False, "filter": False, "analyze": False}
94
 
95
- # Cria tarefas assíncronas
96
  tasks["news"] = asyncio.create_task(fetch_news())
97
  tasks["filter"] = asyncio.create_task(fetch_filter())
98
  tasks["analyze"] = asyncio.create_task(fetch_analyze())
 
23
  response = await client.get("/news")
24
  if response.status_code == 200:
25
  data = response.json()
26
+ print("[NEWS] Fetched:", data)
27
  else:
28
+ print("[NEWS] Erro ao buscar notícias:", response.status_code)
29
  except Exception as e:
30
+ print("[NEWS] Erro:", e)
31
 
32
  await asyncio.sleep(180) # 3 minutos
33
 
 
35
  async with httpx.AsyncClient(timeout=10.0, base_url=BASE_URL) as client:
36
  while not stop_flags.get("filter"):
37
  try:
38
+ response = await client.post("/filter")
39
  if response.status_code == 200:
40
  data = response.json()
41
  f = data.get("filter", {})
 
43
  relevance = f.get("relevance", "low").lower()
44
  brazil_interest = f.get("brazil_interest", False)
45
 
46
+ print("[FILTER] Fetched:", data)
47
 
48
+ # Repetir imediatamente se critérios não atendidos
49
  if not is_news_content or relevance not in ["medium", "high", "viral"] or not brazil_interest:
50
+ print("[FILTER] Critérios não atendidos, refazendo...")
51
+ continue
52
  else:
53
+ print("[FILTER] Erro ao buscar filter:", response.status_code)
54
  except Exception as e:
55
+ print("[FILTER] Erro:", e)
56
 
57
  await asyncio.sleep(120) # 2 minutos
58
 
 
60
  async with httpx.AsyncClient(timeout=10.0, base_url=BASE_URL) as client:
61
  while not stop_flags.get("analyze"):
62
  try:
63
+ response = await client.post("/analyze")
64
  if response.status_code == 200:
65
  data = response.json()
66
  success = data.get("rewrite_result", {}).get("success", False)
67
+ print("[ANALYZE] Fetched:", data)
68
+
69
  if not success:
70
+ print("[ANALYZE] Success=false, tentando novamente em 1 minuto...")
71
  await asyncio.sleep(60)
72
  continue
73
  else:
74
+ print("[ANALYZE] Erro ao buscar analyze:", response.status_code)
75
  await asyncio.sleep(60)
76
  continue
77
  except Exception as e:
78
+ print("[ANALYZE] Erro:", e)
79
  await asyncio.sleep(60)
80
  continue
81
 
 
94
 
95
  stop_flags = {"news": False, "filter": False, "analyze": False}
96
 
 
97
  tasks["news"] = asyncio.create_task(fetch_news())
98
  tasks["filter"] = asyncio.create_task(fetch_filter())
99
  tasks["analyze"] = asyncio.create_task(fetch_analyze())