garvitcpp commited on
Commit
7e7b0a9
·
verified ·
1 Parent(s): 737ef25

Update services/utils/http_utils.py

Browse files
Files changed (1) hide show
  1. services/utils/http_utils.py +127 -128
services/utils/http_utils.py CHANGED
@@ -1,151 +1,150 @@
1
  import aiohttp
2
  import logging
3
- from typing import Optional, List
4
- import asyncio
5
- from fp.fp import FreeProxy
6
  import random
 
 
7
  import time
8
- import contextlib
 
 
 
9
 
10
  logger = logging.getLogger(__name__)
11
 
12
- # Cache for working proxies
13
- WORKING_PROXIES = []
14
- PROXY_REFRESH_TIME = 0
15
- PROXY_REFRESH_INTERVAL = 60 * 10 # 10 minutes
 
 
 
 
 
 
 
 
 
16
 
17
def get_working_proxies() -> List[str]:
    """Return a cached list of working free proxies, refreshing when stale.

    The cache lives in the module globals WORKING_PROXIES / PROXY_REFRESH_TIME
    and is reused for up to PROXY_REFRESH_INTERVAL seconds. When stale, up to
    5 fresh proxies are fetched via FreeProxy. On any failure the previous
    cache is returned unchanged (possibly empty) — deliberate best-effort.

    Returns:
        List of proxy URLs; may be empty if no proxy has ever been found.
    """
    global WORKING_PROXIES, PROXY_REFRESH_TIME

    current_time = time.time()

    # If we have proxies and they're not expired, use them
    if WORKING_PROXIES and (current_time - PROXY_REFRESH_TIME) < PROXY_REFRESH_INTERVAL:
        return WORKING_PROXIES

    # Get new proxies
    try:
        proxies = []
        # Try to get 5 working proxies; each lookup is independent, so a
        # single failed FreeProxy call does not abort the whole refresh.
        for _ in range(5):
            try:
                proxy = FreeProxy(https=True, rand=True, timeout=1).get()
                if proxy and proxy not in proxies:
                    proxies.append(proxy)
            except Exception:
                pass  # best-effort: skip this candidate and keep trying

        if proxies:
            WORKING_PROXIES = proxies
            PROXY_REFRESH_TIME = current_time
            logger.info(f"Refreshed proxy list, found {len(proxies)} working proxies")
        # NOTE(review): when no proxies were found, the refresh timestamp is
        # not advanced, so the next call retries the refresh immediately.
        return WORKING_PROXIES
    except Exception as e:
        logger.error(f"Error refreshing proxy list: {e}")

    return WORKING_PROXIES  # Return whatever we have, even if it's empty
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
48
 
49
async def fetch_page(session: aiohttp.ClientSession, url: str, headers: dict) -> Optional[str]:
    """Fetch a page using aiohttp with free proxies and retry logic.

    Tries every cached free proxy (shuffled) first; if all fail or none are
    available, falls back to direct_request() with disguised headers.

    Args:
        session: Shared aiohttp client session.
        url: Absolute URL to fetch.
        headers: Base request headers.

    Returns:
        The page body as text, or None if every attempt fails.
    """
    logger.info(f"Requesting URL: {url}")

    # Get list of working proxies
    proxies = get_working_proxies()

    # Try with proxies if available
    if proxies:
        # Shuffle proxies for better distribution
        # NOTE(review): this shuffles the cached list in place, so the
        # module-level WORKING_PROXIES order changes between calls.
        random.shuffle(proxies)

        # Try each proxy until one works
        for proxy in proxies:
            try:
                logger.info(f"Trying with proxy: {proxy}")

                # Use standard session with proxy instead of RetryClient
                try:
                    async with session.get(
                        url,
                        headers=headers,
                        proxy=proxy,
                        timeout=20,
                        ssl=False  # Some free proxies don't support SSL verification
                    ) as response:
                        # Handle 202 status code as potential success
                        if response.status in (200, 202):
                            content = await response.text()
                            # Verify we got actual HTML content, not just a waiting page
                            if len(content) > 5000 and ("<html" in content or "<!DOCTYPE" in content):
                                logger.info(f"Successfully retrieved content via proxy")
                                return content
                            else:
                                logger.warning(f"Proxy {proxy} returned too short content or non-HTML")
                        else:
                            logger.warning(f"Proxy {proxy} failed with status {response.status}")
                except Exception as e:
                    logger.warning(f"Error using proxy {proxy}: {str(e)}")
                    continue
            except Exception as e:
                logger.warning(f"Outer error with proxy {proxy}: {str(e)}")
                continue

    # If all proxies failed or no proxies available, try direct request with extensive disguise
    logger.info("All proxies failed or no proxies available, trying direct request with disguised headers")
    return await direct_request(session, url, headers)
96
-
97
async def direct_request(session: aiohttp.ClientSession, url: str, headers: dict) -> Optional[str]:
    """Attempt a direct (proxy-less) request with enhanced browser-like headers.

    Retries up to 3 times with increasing delay between attempts.

    Args:
        session: Shared aiohttp client session.
        url: Absolute URL to fetch.
        headers: Base headers; the browser-disguise set is merged on top.

    Returns:
        The page body as text, or None if all attempts fail.
    """
    # Enhance headers to look more like a real browser
    enhanced_headers = headers.copy()
    enhanced_headers.update({
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
        "Accept-Language": "en-US,en;q=0.9",
        "Accept-Encoding": "gzip, deflate, br",
        "Connection": "keep-alive",
        "Cache-Control": "max-age=0",
        "Sec-Ch-Ua": '"Google Chrome";v="123", "Not:A-Brand";v="8"',
        "Sec-Ch-Ua-Mobile": "?0",
        "Sec-Ch-Ua-Platform": '"Windows"',
        "Sec-Fetch-Dest": "document",
        "Sec-Fetch-Mode": "navigate",
        "Sec-Fetch-Site": "none",
        "Sec-Fetch-User": "?1",
        "Upgrade-Insecure-Requests": "1",
        "Referer": "https://www.google.com/"
    })

    try:
        # Use standard session.get instead of RetryClient
        # Add retry logic manually to ensure proper cleanup
        max_retries = 3
        for attempt in range(max_retries):
            try:
                async with session.get(url, headers=enhanced_headers, timeout=20) as response:
                    # Handle 202 status code as potential success
                    if response.status in (200, 202):
                        content = await response.text()
                        # Verify we got actual HTML content, not just a waiting page
                        if len(content) > 5000 and ("<html" in content or "<!DOCTYPE" in content):
                            logger.info(f"Successfully retrieved content directly")
                            return content
                        else:
                            logger.warning(f"Direct request returned too short content or non-HTML")
                    else:
                        logger.error(f"Direct request failed with status code {response.status}")

                # If we get here, the attempt failed, so wait before retry
                if attempt < max_retries - 1:
                    delay = (attempt + 1) * 2  # linear backoff: 2s, then 4s
                    await asyncio.sleep(delay)

            except Exception as e:
                logger.error(f"Direct request attempt {attempt+1} failed: {e}")
                if attempt < max_retries - 1:
                    delay = (attempt + 1) * 2
                    await asyncio.sleep(delay)

        return None  # All attempts failed
    except Exception as e:
        logger.error(f"All direct request attempts failed: {e}")
        return None
 
 
 
 
 
 
 
 
 
1
  import aiohttp
2
  import logging
 
 
 
3
  import random
4
+ import asyncio
5
+ from typing import Optional, List
6
  import time
7
+ from dotenv import load_dotenv
8
+
9
+ # Load environment variables
10
+ load_dotenv()
11
 
12
  logger = logging.getLogger(__name__)
13
 
14
# WebShare proxies list (format: IP:PORT:USERNAME:PASSWORD)
# SECURITY(review): live proxy credentials are hardcoded here and are now
# part of the repository history. They must be rotated, and the list should
# be loaded from an environment variable instead (dotenv is already loaded
# at the top of this file) — never committed to source control.
WEBSHARE_PROXIES = [
    "198.23.239.134:6540:zvubytfw:ak6yit5k2tvj",
    "207.244.217.165:6712:zvubytfw:ak6yit5k2tvj",
    "107.172.163.27:6543:zvubytfw:ak6yit5k2tvj",
    "161.123.152.115:6360:zvubytfw:ak6yit5k2tvj",
    "23.94.138.75:6349:zvubytfw:ak6yit5k2tvj",
    "216.10.27.159:6837:zvubytfw:ak6yit5k2tvj",
    "136.0.207.84:6661:zvubytfw:ak6yit5k2tvj",
    "64.64.118.149:6732:zvubytfw:ak6yit5k2tvj",
    "142.147.128.93:6593:zvubytfw:ak6yit5k2tvj",
    "154.36.110.199:6853:zvubytfw:ak6yit5k2tvj"
]

# Track proxy usage and failures.
# proxy_usage_count: proxy string -> times handed out by get_next_proxy()
# proxy_failure_count: proxy string -> recorded failures (>= 3 means skipped)
proxy_usage_count = {}
proxy_failure_count = {}
# Index of the last proxy handed out, for round-robin rotation (-1 = none yet)
last_proxy_index = -1
32
+
33
def format_proxy_url(proxy_str: str) -> Optional[str]:
    """Convert an "IP:PORT:USERNAME:PASSWORD" proxy string to a proxy URL.

    Args:
        proxy_str: Proxy description in "IP:PORT:USERNAME:PASSWORD" form.

    Returns:
        A "http://USERNAME:PASSWORD@IP:PORT" URL, or None when the input
        does not have exactly four colon-separated fields.
    """
    parts = proxy_str.split(':')
    if len(parts) != 4:
        # Malformed entry: log and signal failure instead of raising.
        logger.error(f"Invalid proxy format: {proxy_str}")
        return None

    ip, port, username, password = parts
    return f"http://{username}:{password}@{ip}:{port}"
42
+
43
def get_next_proxy() -> Optional[str]:
    """Get the next proxy URL using round-robin with failure skipping.

    Proxies with 3 or more recorded failures are skipped. If every proxy is
    in that state, the failure counts are reset and selection is retried
    once, so a fully-failed pool still yields a proxy.

    Returns:
        A proxy URL built by format_proxy_url(), or None when no proxies are
        configured or the selected entry is malformed.
    """
    global last_proxy_index

    # Guard the empty pool: without this, the modulo below would raise
    # ZeroDivisionError.
    if not WEBSHARE_PROXIES:
        logger.error("No WebShare proxies configured")
        return None

    # At most two passes: the second runs after resetting failure counts
    # (this replaces the original unbounded-looking recursion).
    for _pass in range(2):
        # Simple round-robin selection with failure skipping.
        for _ in range(len(WEBSHARE_PROXIES)):
            last_proxy_index = (last_proxy_index + 1) % len(WEBSHARE_PROXIES)
            proxy_str = WEBSHARE_PROXIES[last_proxy_index]

            # Skip proxies with too many recent failures.
            if proxy_failure_count.get(proxy_str, 0) >= 3:
                continue

            # Track usage for diagnostics.
            proxy_usage_count[proxy_str] = proxy_usage_count.get(proxy_str, 0) + 1

            return proxy_str and format_proxy_url(proxy_str)

        # All proxies have failures: reset the counts and try once more.
        proxy_failure_count.clear()

    # Unreachable with a non-empty pool; kept for type completeness.
    return None
64
+
65
def mark_proxy_failure(proxy_url: str):
    """Record a failure for the proxy behind *proxy_url*.

    After 3 recorded failures the proxy is put on a 10-minute cooldown via
    reset_proxy_failure().
    """
    # Extract the original proxy string from the URL.
    # NOTE(review): this matches by substring on the IP and username fields;
    # it assumes no proxy's IP is a substring of another entry's URL — confirm.
    for proxy_str in WEBSHARE_PROXIES:
        if proxy_str.split(':')[0] in proxy_url and proxy_str.split(':')[2] in proxy_url:
            proxy_failure_count[proxy_str] = proxy_failure_count.get(proxy_str, 0) + 1
            # SECURITY(review): proxy_url embeds username:password, so this
            # log line writes credentials to the logs — consider redacting.
            logger.warning(f"Marked proxy as failed: {proxy_url} (failure count: {proxy_failure_count[proxy_str]})")

            # Reset failure count after 10 minutes to give proxy a second chance
            if proxy_failure_count[proxy_str] >= 3:
                logger.warning(f"Proxy {proxy_url} has failed multiple times, cooling down")
                # NOTE(review): create_task requires a running event loop; safe
                # when called from async code such as fetch_page, but a purely
                # synchronous caller would raise RuntimeError here.
                asyncio.create_task(reset_proxy_failure(proxy_str, 600))  # 10 minutes cooldown
            break
78
+
79
async def reset_proxy_failure(proxy_str: str, delay: int):
    """Zero out a proxy's failure count after waiting *delay* seconds."""
    await asyncio.sleep(delay)
    # Guard clause: ignore proxies we never recorded a failure for.
    if proxy_str not in proxy_failure_count:
        return
    proxy_failure_count[proxy_str] = 0
    logger.info(f"Reset failure count for proxy: {proxy_str}")
85
 
86
async def fetch_page(session: aiohttp.ClientSession, url: str, headers: dict) -> Optional[str]:
    """Fetch a page through the WebShare proxy pool, rotating on failure.

    Tries up to 3 proxies from get_next_proxy(); each failure is recorded via
    mark_proxy_failure() so bad proxies are cooled down.

    Args:
        session: Shared aiohttp client session.
        url: Absolute URL to fetch.
        headers: Extra headers; they override the built-in browser-like set.

    Returns:
        The HTML body on success, or None when every proxy attempt fails.
    """
    logger.info(f"Requesting URL: {url}")

    # Enhanced headers that look more like a browser
    enhanced_headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "Accept-Language": "en-US,en;q=0.9",
        "Accept-Encoding": "gzip, deflate, br",
        "Connection": "keep-alive",
        "Upgrade-Insecure-Requests": "1",
        "Cache-Control": "max-age=0",
        "TE": "Trailers",
        "Referer": "https://www.google.com/"
    }
    # Update with any custom headers provided
    enhanced_headers.update(headers)

    # Try up to 3 different proxies
    max_proxy_attempts = 3

    for attempt in range(max_proxy_attempts):
        proxy_url = get_next_proxy()
        if not proxy_url:
            logger.error("Failed to get a valid proxy")
            return None

        # SECURITY: proxy_url embeds username:password — log only the host
        # part so credentials never reach the logs.
        proxy_host = proxy_url.rsplit('@', 1)[-1]
        logger.info(f"Using proxy {proxy_host} (attempt {attempt+1})")

        try:
            # ClientTimeout is the supported way to bound the whole request;
            # passing a bare int for `timeout` is deprecated in aiohttp.
            async with session.get(
                url,
                headers=enhanced_headers,
                proxy=proxy_url,
                timeout=aiohttp.ClientTimeout(total=30),
                allow_redirects=True
            ) as response:
                if response.status == 200:
                    content = await response.text()

                    # Verify we got actual content (common anti-bot techniques return empty pages)
                    if len(content) > 1000 and ("<html" in content or "<!DOCTYPE" in content):
                        logger.info(f"Successfully retrieved content ({len(content)} bytes)")
                        return content
                    else:
                        logger.warning(f"Response too small or not HTML: {len(content)} bytes")
                        mark_proxy_failure(proxy_url)
                else:
                    logger.warning(f"Response status code: {response.status}")
                    mark_proxy_failure(proxy_url)

        except (aiohttp.ClientError, asyncio.TimeoutError) as e:
            logger.error(f"Request failed with proxy {proxy_host}: {str(e)}")
            mark_proxy_failure(proxy_url)
        except Exception as e:
            logger.error(f"Unexpected error: {str(e)}")
            mark_proxy_failure(proxy_url)

        # Wait before trying the next proxy — but not after the final
        # attempt, where the sleep would only delay the caller for nothing.
        if attempt < max_proxy_attempts - 1:
            await asyncio.sleep(1)

    logger.error("All proxy attempts failed")
    return None