Update app.py
app.py
CHANGED
@@ -9,23 +9,23 @@ from aiohttp import web, ClientTimeout, TCPConnector
 from urllib.parse import parse_qs
 from cachetools import TTLCache
 
-#
-
+cache = TTLCache(maxsize=1000, ttl=1800)  # 30 minutes cache
+
+CHROME_USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
 
 async def fetch_url(url, session, max_retries=3):
-    headers = {
-        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 Safari/537.36'
-    }
+    headers = {"User-Agent": CHROME_USER_AGENT}
     for attempt in range(max_retries):
         try:
-            async with session.get(url, headers=headers, timeout=ClientTimeout(total=
+            async with session.get(url, headers=headers, timeout=ClientTimeout(total=40)) as response:
                 response.raise_for_status()
-
+                content = await response.read()
+                return content.decode('utf-8', errors='ignore')
         except aiohttp.ClientError as e:
             print(f"Attempt {attempt + 1} failed: {str(e)}", flush=True)
             if attempt == max_retries - 1:
                 raise
-            await asyncio.sleep(1)
+            await asyncio.sleep(1)
 
 async def extract_and_transform_proxies(input_text):
     try:

@@ -50,80 +50,37 @@ async def extract_and_transform_proxies(input_text):
         transformed_proxies = []
 
         for proxy in proxies_list:
-            if proxy.get('type')
+            if proxy.get('type') in ['ss', 'trojan']:
                 name = proxy.get('name', '').strip()
                 server = proxy.get('server', '').strip()
                 port = str(proxy.get('port', '')).strip()
 
-
-
-                if 'cipher' in proxy:
-                    ss_parts.append(f"encrypt-method={proxy['cipher'].strip()}")
-                if 'password' in proxy:
-                    ss_parts.append(f"password={proxy['password'].strip()}")
-                if 'udp' in proxy:
-                    ss_parts.append(f"udp-relay={'true' if proxy['udp'] in [True, 'true', 'True'] else 'false'}")
-
-                transformed = ", ".join(ss_parts)
-                transformed_proxies.append(transformed)
-
-            elif proxy.get('type') == 'trojan':
-                name = proxy.get('name', '').strip()
-                server = proxy.get('server', '').strip()
-                port = str(proxy.get('port', '')).strip()
-
-                if 'password' in proxy:
-                    trojan_parts.append(f"password={proxy['password'].strip()}")
-                if 'sni' in proxy:
-                    trojan_parts.append(f"sni={proxy['sni'].strip()}")
-                if 'skip-cert-verify' in proxy:
-                    trojan_parts.append(f"skip-cert-verify={str(proxy['skip-cert-verify']).lower()}")
+                parts = [f"{name} = {proxy['type']}, {server}, {port}"]
+
+                if proxy['type'] == 'ss':
+                    if 'cipher' in proxy:
+                        parts.append(f"encrypt-method={proxy['cipher'].strip()}")
+                elif proxy['type'] == 'trojan':
+                    if 'password' in proxy:
+                        parts.append(f"password={proxy['password'].strip()}")
+                    if 'sni' in proxy:
+                        parts.append(f"sni={proxy['sni'].strip()}")
+                    if 'skip-cert-verify' in proxy:
+                        parts.append(f"skip-cert-verify={str(proxy['skip-cert-verify']).lower()}")
 
                 if 'udp' in proxy:
-
+                    parts.append(f"udp-relay={'true' if proxy['udp'] in [True, 'true', 'True'] else 'false'}")
 
-
-                transformed_proxies.append(transformed)
+                transformed_proxies.append(", ".join(parts))
 
         return "\n".join(transformed_proxies) if transformed_proxies else "未找到有效的SS或Trojan代理配置"
 
-async def log_request(request, response):
-    timestamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
-    client_ip = request.remote
-    request_line = f"{request.method} {request.path}"
-    if request.query_string:
-        request_line += f"?{request.query_string}"
-    status_code = response.status
-    content_length = response.content_length
-
-    log_message = f"{timestamp} - {client_ip} - \"{request_line}\" {status_code} {content_length}"
-    print(log_message, flush=True)
-
-@web.middleware
-async def logging_middleware(request, handler):
-    start_time = datetime.datetime.now()
-    try:
-        response = await handler(request)
-        await log_request(request, response)
-        end_time = datetime.datetime.now()
-        print(f"Request processing time: {end_time - start_time}", flush=True)
-        return response
-    except Exception as e:
-        end_time = datetime.datetime.now()
-        print(f"Error occurred: {str(e)}", flush=True)
-        print(f"Request processing time: {end_time - start_time}", flush=True)
-        print("Traceback:", flush=True)
-        traceback.print_exc()
-        return web.Response(text=f"Internal Server Error: {str(e)}", status=500)
-
 async def handle_request(request):
     if request.path == '/':
         query_params = parse_qs(request.query_string)
         if 'url' in query_params:
             url = query_params['url'][0]
 
-            # Check the cache
             if url in cache:
                 print(f"Cache hit for URL: {url}", flush=True)
                 return web.Response(text=cache[url], content_type='text/plain')

@@ -136,7 +93,6 @@ async def handle_request(request):
             result = await extract_and_transform_proxies(input_text)
             print(f"Transformed result length: {len(result)}", flush=True)
 
-            # Store the result in the cache
             cache[url] = result
 
             return web.Response(text=result, content_type='text/plain')

@@ -159,11 +115,27 @@ async def handle_request(request):
     return web.Response(text="Not Found", status=404)
 
 async def init_app():
-    app = web.Application(middlewares=[logging_middleware])
+    app = web.Application(middlewares=[web.middleware(lambda _, handler: logging_middleware(handler))])
     app.router.add_get('/', handle_request)
     return app
 
+async def logging_middleware(handler, request):
+    start_time = datetime.datetime.now()
+    try:
+        response = await handler(request)
+        end_time = datetime.datetime.now()
+        print(f"{end_time.strftime('%Y-%m-%d %H:%M:%S')} - {request.remote} - \"{request.method} {request.path}\" {response.status} {response.content_length}", flush=True)
+        print(f"Request processing time: {end_time - start_time}", flush=True)
+        return response
+    except Exception as e:
+        end_time = datetime.datetime.now()
+        print(f"Error occurred: {str(e)}", flush=True)
+        print(f"Request processing time: {end_time - start_time}", flush=True)
+        print("Traceback:", flush=True)
+        traceback.print_exc()
+        return web.Response(text=f"Internal Server Error: {str(e)}", status=500)
+
 if __name__ == "__main__":
     print(f"===== Application Startup at {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')} =====")
     print("Server running on port 8080")
-    web.run_app(init_app(), port=8080, print=lambda _: None)
+    web.run_app(init_app(), port=8080, print=lambda _: None)
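
For context on what the consolidated loop now produces: each qualifying entry is flattened into a single Surge-style line. Below is a minimal sketch of that per-entry transformation, run on a hypothetical Clash-style proxy dict; the node name, server, port, and cipher are made-up sample values, not taken from the repository.

# Minimal sketch of the per-entry transformation in extract_and_transform_proxies.
# All sample values below are hypothetical.
proxy = {
    'type': 'ss',
    'name': 'example-node',
    'server': 'example.com',
    'port': 8388,
    'cipher': 'aes-256-gcm',
    'udp': True,
}

name = proxy.get('name', '').strip()
server = proxy.get('server', '').strip()
port = str(proxy.get('port', '')).strip()

parts = [f"{name} = {proxy['type']}, {server}, {port}"]
if proxy['type'] == 'ss' and 'cipher' in proxy:
    parts.append(f"encrypt-method={proxy['cipher'].strip()}")
if 'udp' in proxy:
    parts.append(f"udp-relay={'true' if proxy['udp'] in [True, 'true', 'True'] else 'false'}")

print(", ".join(parts))
# -> example-node = ss, example.com, 8388, encrypt-method=aes-256-gcm, udp-relay=true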
|
|
|
|
| 9 |
from urllib.parse import parse_qs
|
| 10 |
from cachetools import TTLCache
|
| 11 |
|
| 12 |
+
cache = TTLCache(maxsize=1000, ttl=1800) # 30 minutes cache
|
| 13 |
+
|
| 14 |
+
CHROME_USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
|
| 15 |
|
| 16 |
async def fetch_url(url, session, max_retries=3):
|
| 17 |
+
headers = {"User-Agent": CHROME_USER_AGENT}
|
|
|
|
|
|
|
| 18 |
for attempt in range(max_retries):
|
| 19 |
try:
|
| 20 |
+
async with session.get(url, headers=headers, timeout=ClientTimeout(total=40)) as response:
|
| 21 |
response.raise_for_status()
|
| 22 |
+
content = await response.read()
|
| 23 |
+
return content.decode('utf-8', errors='ignore')
|
| 24 |
except aiohttp.ClientError as e:
|
| 25 |
print(f"Attempt {attempt + 1} failed: {str(e)}", flush=True)
|
| 26 |
if attempt == max_retries - 1:
|
| 27 |
raise
|
| 28 |
+
await asyncio.sleep(1)
|
| 29 |
|
| 30 |
async def extract_and_transform_proxies(input_text):
|
| 31 |
try:
|
|
|
|
| 50 |
transformed_proxies = []
|
| 51 |
|
| 52 |
for proxy in proxies_list:
|
| 53 |
+
if proxy.get('type') in ['ss', 'trojan']:
|
| 54 |
name = proxy.get('name', '').strip()
|
| 55 |
server = proxy.get('server', '').strip()
|
| 56 |
port = str(proxy.get('port', '')).strip()
|
| 57 |
|
| 58 |
+
parts = [f"{name} = {proxy['type']}, {server}, {port}"]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 59 |
|
| 60 |
+
if proxy['type'] == 'ss':
|
| 61 |
+
if 'cipher' in proxy:
|
| 62 |
+
parts.append(f"encrypt-method={proxy['cipher'].strip()}")
|
| 63 |
+
elif proxy['type'] == 'trojan':
|
| 64 |
+
if 'password' in proxy:
|
| 65 |
+
parts.append(f"password={proxy['password'].strip()}")
|
| 66 |
+
if 'sni' in proxy:
|
| 67 |
+
parts.append(f"sni={proxy['sni'].strip()}")
|
| 68 |
+
if 'skip-cert-verify' in proxy:
|
| 69 |
+
parts.append(f"skip-cert-verify={str(proxy['skip-cert-verify']).lower()}")
|
| 70 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 71 |
if 'udp' in proxy:
|
| 72 |
+
parts.append(f"udp-relay={'true' if proxy['udp'] in [True, 'true', 'True'] else 'false'}")
|
| 73 |
|
| 74 |
+
transformed_proxies.append(", ".join(parts))
|
|
|
|
| 75 |
|
| 76 |
return "\n".join(transformed_proxies) if transformed_proxies else "未找到有效的SS或Trojan代理配置"
|
| 77 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 78 |
async def handle_request(request):
|
| 79 |
if request.path == '/':
|
| 80 |
query_params = parse_qs(request.query_string)
|
| 81 |
if 'url' in query_params:
|
| 82 |
url = query_params['url'][0]
|
| 83 |
|
|
|
|
| 84 |
if url in cache:
|
| 85 |
print(f"Cache hit for URL: {url}", flush=True)
|
| 86 |
return web.Response(text=cache[url], content_type='text/plain')
|
|
|
|
| 93 |
result = await extract_and_transform_proxies(input_text)
|
| 94 |
print(f"Transformed result length: {len(result)}", flush=True)
|
| 95 |
|
|
|
|
| 96 |
cache[url] = result
|
| 97 |
|
| 98 |
return web.Response(text=result, content_type='text/plain')
|
|
|
|
| 115 |
return web.Response(text="Not Found", status=404)
|
| 116 |
|
| 117 |
async def init_app():
|
| 118 |
+
app = web.Application(middlewares=[web.middleware(lambda _, handler: logging_middleware(handler))])
|
| 119 |
app.router.add_get('/', handle_request)
|
| 120 |
return app
|
| 121 |
|
| 122 |
+
async def logging_middleware(handler, request):
|
| 123 |
+
start_time = datetime.datetime.now()
|
| 124 |
+
try:
|
| 125 |
+
response = await handler(request)
|
| 126 |
+
end_time = datetime.datetime.now()
|
| 127 |
+
print(f"{end_time.strftime('%Y-%m-%d %H:%M:%S')} - {request.remote} - \"{request.method} {request.path}\" {response.status} {response.content_length}", flush=True)
|
| 128 |
+
print(f"Request processing time: {end_time - start_time}", flush=True)
|
| 129 |
+
return response
|
| 130 |
+
except Exception as e:
|
| 131 |
+
end_time = datetime.datetime.now()
|
| 132 |
+
print(f"Error occurred: {str(e)}", flush=True)
|
| 133 |
+
print(f"Request processing time: {end_time - start_time}", flush=True)
|
| 134 |
+
print("Traceback:", flush=True)
|
| 135 |
+
traceback.print_exc()
|
| 136 |
+
return web.Response(text=f"Internal Server Error: {str(e)}", status=500)
|
| 137 |
+
|
| 138 |
if __name__ == "__main__":
|
| 139 |
print(f"===== Application Startup at {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')} =====")
|
| 140 |
print("Server running on port 8080")
|
| 141 |
+
web.run_app(init_app(), port=8080, print=lambda _: None)
|
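
One observation on the relocated logging code: aiohttp's middleware convention is a coroutine taking (request, handler) decorated with @web.middleware, which is the shape the removed logging_middleware had. The new wiring defines logging_middleware(handler, request) and registers it through a lambda that forwards only the handler, which looks inconsistent with that convention. For comparison, a minimal sketch of the conventional registration, assuming aiohttp 3.x (the ping handler is a stand-in added only to make the sketch self-contained):

import datetime
from aiohttp import web

@web.middleware
async def logging_middleware(request, handler):
    # Conventional aiohttp middleware: takes (request, handler), awaits the handler, returns the response.
    start_time = datetime.datetime.now()
    response = await handler(request)
    print(f'{request.remote} - "{request.method} {request.path}" {response.status}', flush=True)
    print(f"Request processing time: {datetime.datetime.now() - start_time}", flush=True)
    return response

async def ping(request):
    # Stand-in handler so the sketch runs on its own.
    return web.Response(text="ok")

async def init_app():
    app = web.Application(middlewares=[logging_middleware])
    app.router.add_get('/', ping)
    return app

# web.run_app(init_app(), port=8080)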