Karim shoair committed on
Commit
001dbf4
·
1 Parent(s): 1a11816

fix(sessions): Fixing a bug with max pages counter

Browse files
scrapling/engines/_browsers/_base.py CHANGED
@@ -31,7 +31,7 @@ class SyncSession:
31
  def __init__(self, max_pages: int = 1):
32
  self.max_pages = max_pages
33
  self.page_pool = PagePool(max_pages)
34
- self.__max_wait_for_page = 60
35
  self.playwright: Optional[Playwright] = None
36
  self.context: Optional[BrowserContext] = None
37
  self._closed = False
@@ -50,7 +50,7 @@ class SyncSession:
50
  # If we're at max capacity after cleanup, wait for busy pages to finish
51
  if self.page_pool.pages_count >= self.max_pages:
52
  start_time = time()
53
- while time() - start_time < self.__max_wait_for_page:
54
  # Wait for any pages to finish, then clean them up
55
  sleep(0.05)
56
  self.page_pool.close_all_finished_pages()
@@ -58,7 +58,7 @@ class SyncSession:
58
  break
59
  else:
60
  raise TimeoutError(
61
- f"No pages finished to clear place in the pool within the {self.__max_wait_for_page}s timeout period"
62
  )
63
 
64
  page = self.context.new_page()
@@ -111,7 +111,7 @@ class AsyncSession(SyncSession):
111
  # If we're at max capacity after cleanup, wait for busy pages to finish
112
  if self.page_pool.pages_count >= self.max_pages:
113
  start_time = time()
114
- while time() - start_time < self.__max_wait_for_page:
115
  # Wait for any pages to finish, then clean them up
116
  await asyncio_sleep(0.05)
117
  await self.page_pool.aclose_all_finished_pages()
@@ -119,7 +119,7 @@ class AsyncSession(SyncSession):
119
  break
120
  else:
121
  raise TimeoutError(
122
- f"No pages finished to clear place in the pool within the {self.__max_wait_for_page}s timeout period"
123
  )
124
 
125
  page = await self.context.new_page()
 
31
  def __init__(self, max_pages: int = 1):
32
  self.max_pages = max_pages
33
  self.page_pool = PagePool(max_pages)
34
+ self._max_wait_for_page = 60
35
  self.playwright: Optional[Playwright] = None
36
  self.context: Optional[BrowserContext] = None
37
  self._closed = False
 
50
  # If we're at max capacity after cleanup, wait for busy pages to finish
51
  if self.page_pool.pages_count >= self.max_pages:
52
  start_time = time()
53
+ while time() - start_time < self._max_wait_for_page:
54
  # Wait for any pages to finish, then clean them up
55
  sleep(0.05)
56
  self.page_pool.close_all_finished_pages()
 
58
  break
59
  else:
60
  raise TimeoutError(
61
+ f"No pages finished to clear place in the pool within the {self._max_wait_for_page}s timeout period"
62
  )
63
 
64
  page = self.context.new_page()
 
111
  # If we're at max capacity after cleanup, wait for busy pages to finish
112
  if self.page_pool.pages_count >= self.max_pages:
113
  start_time = time()
114
+ while time() - start_time < self._max_wait_for_page:
115
  # Wait for any pages to finish, then clean them up
116
  await asyncio_sleep(0.05)
117
  await self.page_pool.aclose_all_finished_pages()
 
119
  break
120
  else:
121
  raise TimeoutError(
122
+ f"No pages finished to clear place in the pool within the {self._max_wait_for_page}s timeout period"
123
  )
124
 
125
  page = await self.context.new_page()