Uhhy committed on
Commit
3ee6bf9
verified
1 Parent(s): 715dcb8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -13
app.py CHANGED
@@ -151,15 +151,16 @@ async def fetch_proxies_from_sources():
151
  if 'free-proxy-list.net' in url:
152
  soup = BeautifulSoup(response_text, 'html.parser')
153
  table = soup.find('table', {'id': 'proxylisttable'})
154
- for row in table.find_all('tr')[1:]:
155
- try:
156
- columns = row.find_all('td')
157
- ip = columns[0].text.strip()
158
- port = columns[1].text.strip()
159
- proxy = f"{ip}:{port}"
160
- proxies.add(proxy)
161
- except:
162
- pass
 
163
  else:
164
  lines = response_text.splitlines()
165
  new_proxies = {line.strip() for line in lines if line.strip()}
@@ -171,8 +172,12 @@ async def fetch_proxies_from_sources():
171
  valid_proxies[proxy] = True
172
  else:
173
  invalid_proxies[proxy] = True
 
174
 
175
- asyncio.create_task(verify_proxies_in_background())
 
 
 
176
 
177
  def fetch_response(url):
178
  headers = create_headers()
@@ -183,9 +188,6 @@ def fetch_response(url):
183
  except requests.RequestException:
184
  return None
185
 
186
- @app.on_event("startup")
187
- async def on_startup():
188
- await fetch_proxies_from_sources()
189
 
190
  @app.get("/")
191
  async def root():
 
151
  if 'free-proxy-list.net' in url:
152
  soup = BeautifulSoup(response_text, 'html.parser')
153
  table = soup.find('table', {'id': 'proxylisttable'})
154
+ if table: # Verifica si se encontró la tabla
155
+ for row in table.find_all('tr')[1:]:
156
+ try:
157
+ columns = row.find_all('td')
158
+ ip = columns[0].text.strip()
159
+ port = columns[1].text.strip()
160
+ proxy = f"{ip}:{port}"
161
+ proxies.add(proxy)
162
+ except:
163
+ pass
164
  else:
165
  lines = response_text.splitlines()
166
  new_proxies = {line.strip() for line in lines if line.strip()}
 
172
  valid_proxies[proxy] = True
173
  else:
174
  invalid_proxies[proxy] = True
175
+ return valid_proxies, invalid_proxies
176
 
177
+ @app.on_event("startup")
178
+ async def on_startup():
179
+ global valid_proxies, invalid_proxies
180
+ valid_proxies, invalid_proxies = await fetch_proxies_from_sources()
181
 
182
  def fetch_response(url):
183
  headers = create_headers()
 
188
  except requests.RequestException:
189
  return None
190
 
 
 
 
191
 
192
  @app.get("/")
193
  async def root():