from playwright.async_api import async_playwright
import asyncio
import random
import logging
from typing import Optional, List
# Module-level logger for proxy/browser diagnostics.
logger = logging.getLogger(__name__)
# Your WebShare proxies
# SECURITY(review): proxy credentials are hard-coded in source in
# "ip:port:username:password" form. Rotate these credentials and load them
# from an environment variable or secrets store instead of committing them.
WEBSHARE_PROXIES = [
    "198.23.239.134:6540:zvubytfw:ak6yit5k2tvj",
    "207.244.217.165:6712:zvubytfw:ak6yit5k2tvj",
    "107.172.163.27:6543:zvubytfw:ak6yit5k2tvj",
    "161.123.152.115:6360:zvubytfw:ak6yit5k2tvj",
    "23.94.138.75:6349:zvubytfw:ak6yit5k2tvj",
    "216.10.27.159:6837:zvubytfw:ak6yit5k2tvj",
    "136.0.207.84:6661:zvubytfw:ak6yit5k2tvj",
    "64.64.118.149:6732:zvubytfw:ak6yit5k2tvj",
    "142.147.128.93:6593:zvubytfw:ak6yit5k2tvj",
    "154.36.110.199:6853:zvubytfw:ak6yit5k2tvj"
]
# Track proxy failures
# Maps proxy string -> failure count; proxies with 3+ failures are skipped
# by get_random_proxy() until the count is reset.
proxy_failures = {}
def get_random_proxy() -> str:
    """Pick a random proxy, avoiding those with 3 or more recorded failures.

    Returns:
        A single proxy string in "ip:port:username:password" format.
        (The original annotation said ``List[str]``, but ``random.choice``
        always returns one element — fixed to ``str``.)
    """
    # Skip proxies that mark_proxy_failure() has flagged 3+ times.
    available_proxies = [p for p in WEBSHARE_PROXIES if proxy_failures.get(p, 0) < 3]
    if not available_proxies:
        # Every proxy has failed repeatedly: reset all counters so we keep
        # operating rather than dead-ending with an empty pool.
        for proxy in WEBSHARE_PROXIES:
            proxy_failures[proxy] = 0
        # Copy instead of aliasing the module-level list.
        available_proxies = list(WEBSHARE_PROXIES)
    return random.choice(available_proxies)
def mark_proxy_failure(proxy_str: str) -> None:
    """Record a failure for *proxy_str*; on the 3rd failure, start a 5-minute cooldown.

    Args:
        proxy_str: Proxy in "ip:port:username:password" format.
    """
    proxy_failures[proxy_str] = proxy_failures.get(proxy_str, 0) + 1
    logger.warning(f"Marked proxy as failed: {proxy_str} (failure count: {proxy_failures[proxy_str]})")
    # Schedule the cooldown only when the threshold is first crossed; the
    # original ">= 3" check spawned a duplicate reset task on every
    # subsequent failure of an already-benched proxy.
    if proxy_failures[proxy_str] == 3:
        logger.warning(f"Proxy {proxy_str} has failed multiple times, will not use for 5 minutes")
        try:
            # create_task requires a running event loop; guard so a failure
            # recorded from synchronous context doesn't raise RuntimeError.
            asyncio.create_task(reset_proxy_after_delay(proxy_str))
        except RuntimeError:
            logger.warning(f"No running event loop; cooldown reset for {proxy_str} not scheduled")
async def reset_proxy_after_delay(proxy_str: str) -> None:
    """Clear the failure count for *proxy_str* once its cooldown expires."""
    await asyncio.sleep(300)  # cooldown window: 5 minutes
    # Guard clause: nothing to do if the proxy was never tracked.
    if proxy_str not in proxy_failures:
        return
    proxy_failures[proxy_str] = 0
    logger.info(f"Reset failure count for proxy: {proxy_str}")
async def fetch_page_with_browser(url: str, user_agent: str) -> Optional[str]:
    """Fetch a page with headless Chromium through a rotating WebShare proxy.

    Args:
        url: Target URL. URLs containing "search"/"searchresults" get an
            extra check that property-listing cards actually rendered.
        user_agent: User-Agent string applied to the browser context.

    Returns:
        The page HTML on success, or ``None`` when both attempts fail or
        the response content looks invalid.
    """
    logger.info(f"Requesting URL with browser: {url}")
    # Try up to 2 different proxies
    for attempt in range(2):
        proxy_str = get_random_proxy()
        ip, port, username, password = proxy_str.split(':')
        logger.info(f"Using proxy {ip}:{port} (attempt {attempt+1})")
        try:
            async with async_playwright() as p:
                browser = await p.chromium.launch(
                    headless=True,
                    proxy={
                        "server": f"http://{ip}:{port}",
                        "username": username,
                        "password": password
                    }
                )
                # try/finally guarantees the browser is closed on every path;
                # the original leaked it when an exception fired between
                # launch() and the explicit close() calls.
                try:
                    # Create context with realistic settings
                    context = await browser.new_context(
                        viewport={"width": 1920, "height": 1080},
                        user_agent=user_agent
                    )
                    # Basic stealth: hide the webdriver flag bot detectors probe.
                    await context.add_init_script("""
                        Object.defineProperty(navigator, 'webdriver', {
                            get: () => false,
                        });
                    """)
                    # Create page and navigate
                    page = await context.new_page()
                    response = await page.goto(url, wait_until="networkidle", timeout=30000)
                    if response and response.status in (200, 202):
                        # Wait a bit for any dynamic content to load
                        await asyncio.sleep(3)
                        html = await page.content()
                        # Sanity-check that we received a real HTML document,
                        # not a short block/error page.
                        if len(html) > 5000 and ("<html" in html or "<!DOCTYPE" in html):
                            if "searchresults" in url or "search" in url:
                                # For search pages, ensure we have property listings
                                await page.wait_for_timeout(2000)
                                has_results = await page.query_selector(
                                    "[data-testid='property-card'], .sr_property_block, .sr_item"
                                )
                                if has_results:
                                    logger.info(f"Successfully retrieved search results ({len(html)} bytes)")
                                    return html
                                logger.warning("No property cards found in search results")
                            else:
                                # For hotel detail pages, just return the content
                                logger.info(f"Successfully retrieved content ({len(html)} bytes)")
                                return html
                finally:
                    await browser.close()
        except Exception as e:
            logger.error(f"Browser request failed: {str(e)}")
            mark_proxy_failure(proxy_str)
            # Wait before trying another proxy
            await asyncio.sleep(2)
    logger.error("All browser attempts failed")
    return None