valouas committed on
Commit
074ccb1
·
verified ·
1 Parent(s): 7c24206

Upload bot_concours_sans_api.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. bot_concours_sans_api.py +21 -18
bot_concours_sans_api.py CHANGED
@@ -335,19 +335,14 @@ class LocalScraper:
335
  self.session = None
336
 
337
  async def __aenter__(self):
338
- import aiohttp
339
- connector = aiohttp.TCPConnector(limit=10, limit_per_host=3)
340
- timeout = aiohttp.ClientTimeout(total=30, connect=10)
341
- self.session = aiohttp.ClientSession(
342
- connector=connector,
343
- timeout=timeout,
344
- headers={'User-Agent': random.choice(USER_AGENTS)}
345
- )
346
  return self
347
 
348
  async def __aexit__(self, exc_type, exc_val, exc_tb):
349
  if self.session:
350
- await self.session.close()
351
 
352
  async def scrape_all_sources(self) -> List[Contest]:
353
  """Scrape tous les sites web localement"""
@@ -405,15 +400,15 @@ class LocalScraper:
405
  """Fetch avec retry et gestion d'erreurs"""
406
  for attempt in range(max_retries):
407
  try:
408
- async with self.session.get(url) as response:
409
- if response.status == 200:
410
- return await response.text()
411
- elif response.status == 429:
412
- wait_time = 2 ** attempt * 5
413
- logging.warning(f"Rate limited on {url}, waiting {wait_time}s")
414
- await asyncio.sleep(wait_time)
415
- else:
416
- logging.warning(f"HTTP {response.status} for {url}")
417
 
418
  except Exception as e:
419
  logging.error(f"Attempt {attempt+1} failed for {url}: {e}")
@@ -1097,6 +1092,10 @@ class ContestBotOrchestrator:
1097
 
1098
  def run_bot_cycle():
1099
  """Point d'entrée pour le scheduler"""
 
 
 
 
1100
  bot = ContestBotOrchestrator()
1101
 
1102
  # Cycle principal
@@ -1115,6 +1114,10 @@ def main():
1115
  print("✅ Aucune API externe requise")
1116
  print()
1117
 
 
 
 
 
1118
  # Vérifier les arguments de ligne de commande
1119
  if len(sys.argv) > 1 and sys.argv[1] == "--run-now":
1120
  logging.info("Running immediate cycle")
 
335
  self.session = None
336
 
337
  async def __aenter__(self):
338
+ # Utiliser requests au lieu d'aiohttp pour éviter les problèmes Windows
339
+ self.session = requests.Session()
340
+ self.session.headers.update({'User-Agent': random.choice(USER_AGENTS)})
 
 
 
 
 
341
  return self
342
 
343
  async def __aexit__(self, exc_type, exc_val, exc_tb):
344
  if self.session:
345
+ self.session.close()
346
 
347
  async def scrape_all_sources(self) -> List[Contest]:
348
  """Scrape tous les sites web localement"""
 
400
  """Fetch avec retry et gestion d'erreurs"""
401
  for attempt in range(max_retries):
402
  try:
403
+ response = self.session.get(url, timeout=30)
404
+ if response.status_code == 200:
405
+ return response.text
406
+ elif response.status_code == 429:
407
+ wait_time = 2 ** attempt * 5
408
+ logging.warning(f"Rate limited on {url}, waiting {wait_time}s")
409
+ await asyncio.sleep(wait_time)
410
+ else:
411
+ logging.warning(f"HTTP {response.status_code} for {url}")
412
 
413
  except Exception as e:
414
  logging.error(f"Attempt {attempt+1} failed for {url}: {e}")
 
1092
 
1093
  def run_bot_cycle():
1094
  """Point d'entrée pour le scheduler"""
1095
+ # Fix pour Windows - définir la politique d'événements asyncio
1096
+ if sys.platform.startswith('win'):
1097
+ asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
1098
+
1099
  bot = ContestBotOrchestrator()
1100
 
1101
  # Cycle principal
 
1114
  print("✅ Aucune API externe requise")
1115
  print()
1116
 
1117
+ # Fix pour Windows - définir la politique d'événements asyncio
1118
+ if sys.platform.startswith('win'):
1119
+ asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
1120
+
1121
  # Vérifier les arguments de ligne de commande
1122
  if len(sys.argv) > 1 and sys.argv[1] == "--run-now":
1123
  logging.info("Running immediate cycle")