import gradio as gr
import requests
import threading
import time
import random
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor, as_completed
# ==============================================================================
# ⚙️ CONFIGURATION (RAM EATER EDITION)
# ==============================================================================
TARGET_COUNT = 200     # Keep 200 elite proxies
TIMEOUT_SEC = 3        # ⚡ Speed: kill the connection if no response in 3 s (was 5 s)
CHECK_INTERVAL = 10    # Re-check every 10 s
MAX_THREADS = 2000     # 🚀 AGGRESSIVE: 2,000 checks at once (uses more RAM/CPU)
# Shared Memory
proxy_storage = {
    "valid_proxies": [],
    "last_updated": "Not yet started"
}
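# Note: this dict is shared between the worker thread and the Gradio handlers.
# We only ever replace whole values (a list / a string), which is atomic enough
# under CPython's GIL, so no explicit lock is used here.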
# ==============================================================================
# 🕵️‍♂️ PROXY WORKER (Aggressive Multithreading)
# ==============================================================================
def check_proxy(ip):
    """Returns IP if valid and FAST."""
    # Aggressive checking against YouTube
    proxies = {"http": f"http://{ip}", "https": f"http://{ip}"}
    try:
        # We use a short timeout to filter out slow proxies immediately
        r = requests.get("https://www.youtube.com", proxies=proxies, timeout=TIMEOUT_SEC)
        if r.status_code == 200:
            return ip
    except requests.RequestException:
        pass
    return None
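# e.g. check_proxy("203.0.113.10:8080") -> "203.0.113.10:8080" on success, else None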
def worker_loop():
    while True:
        print(f"\n[{datetime.now().strftime('%H:%M:%S')}] ☢️ Starting AGGRESSIVE Scan (Threads: {MAX_THREADS})...")

        # 1. MASSIVE FETCH (Fill RAM with Candidates)
        # We fetch from more sources to give the threads more fuel
        raw_proxies = []
        sources = [
            "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt",
            "https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt",
            "https://api.proxyscrape.com/v2/?request=getproxies&protocol=http&timeout=5000&country=all&ssl=all&anonymity=all",
            "https://raw.githubusercontent.com/shiftytr/proxy-list/master/proxy.txt",
            "https://raw.githubusercontent.com/hookzof/socks5_list/master/proxy.txt",
            "https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list-raw.txt",
            "https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt",
            "https://raw.githubusercontent.com/jetkai/proxy-list/main/online-proxies/txt/proxies-http.txt",
        ]
print(" π₯ Downloading huge proxy lists into RAM...")
# Quick fetch loop
for s in sources:
try:
r = requests.get(s, timeout=5)
if r.status_code == 200:
raw_proxies += r.text.strip().split("\n")
except: pass
        # Deduplicate and shuffle
        raw_proxies = list(set(raw_proxies))
        random.shuffle(raw_proxies)
        print(f"   🔥 RAM loaded with {len(raw_proxies)} candidates.")
        # 2. RE-VALIDATE EXISTING + CHECK NEW (Mixed Pool)
        # We combine existing good proxies with new ones to check them all at high speed
        check_list = proxy_storage["valid_proxies"] + raw_proxies
        # Limit checking to the first 5,000 to prevent freezing, but high enough to use resources
        check_list = check_list[:5000]
        new_valid_pool = []
print(f" π Launching {MAX_THREADS} parallel threads...")
with ThreadPoolExecutor(max_workers=MAX_THREADS) as executor:
future_to_ip = {executor.submit(check_proxy, ip): ip for ip in check_list}
for future in as_completed(future_to_ip):
result = future.result()
if result:
if result not in new_valid_pool:
print(f" β
CAPTURED: {result}")
new_valid_pool.append(result)
# STOP early if we have enough good ones (to save time)
# But since you want to utilize RAM/Speed, maybe we keep going a bit longer
if len(new_valid_pool) >= TARGET_COUNT + 5: # Buffer of 5 extra
print(" π― Target hit! Stopping scan early.")
executor.shutdown(wait=False, cancel_futures=True)
break
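        # Note: cancel_futures requires Python 3.9+; on older interpreters,
        # drop that argument and let in-flight checks finish on their own.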
        # Update Storage
        proxy_storage["valid_proxies"] = new_valid_pool[:TARGET_COUNT]
        proxy_storage["last_updated"] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print(f"   💤 Sleeping for {CHECK_INTERVAL}s. Active Pool: {len(proxy_storage['valid_proxies'])}")
        time.sleep(CHECK_INTERVAL)
# Start Background Thread
threading.Thread(target=worker_loop, daemon=True).start()
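# daemon=True lets the process exit immediately without waiting for the scan loop.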
# ==============================================================================
# 🌐 API ENDPOINT (FIXED)
# ==============================================================================
def get_proxies_api():
    """Main App calls this to get the list"""
    return proxy_storage["valid_proxies"], f"Updated: {proxy_storage['last_updated']}"
with gr.Blocks() as app:
    gr.Markdown("## 📦 Proxy Engine (Backend)")
    with gr.Row():
        json_out = gr.JSON(label="Live Proxy Pool")
        status_out = gr.Textbox(label="Last Update")
    refresh_btn = gr.Button("Refresh View")

    # --- THIS IS THE FIX: We added api_name="get_proxies" ---
    refresh_btn.click(get_proxies_api, outputs=[json_out, status_out], api_name="get_proxies")

    # Load immediately on open
    app.load(get_proxies_api, outputs=[json_out, status_out])
app.launch() |
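
# A minimal client-side sketch of consuming the named endpoint above. It assumes
# the `gradio_client` package and uses a placeholder Space URL (substitute your
# own); api_name="get_proxies" exposes the handler as /get_proxies:
#
#   from gradio_client import Client
#   client = Client("https://your-user-proxy-engine.hf.space")  # hypothetical URL
#   proxies, status = client.predict(api_name="/get_proxies")
#   print(status, proxies[:5])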