Update app.py
app.py CHANGED
@@ -3,93 +3,116 @@ import requests
 import threading
 import time
 import random
+from datetime import datetime

 # ==============================================================================
 # ⚙️ CONFIGURATION
 # ==============================================================================
+TARGET_COUNT = 10     # We want exactly 10 working proxies
+TIMEOUT_SEC = 5       # Reject if slower than 5 seconds
+CHECK_INTERVAL = 300  # Re-check every 5 minutes (300 seconds)

+# Shared Memory (The "Live File")
 proxy_storage = {
+    "valid_proxies": [],
+    "last_updated": "Not yet started"
 }

 # ==============================================================================
+# 🕵️‍♂️ PROXY WORKER (Background Thread)
 # ==============================================================================
 def check_proxy(ip):
+    """Returns True if proxy connects to YouTube in < 5 seconds."""
     proxies = {"http": f"http://{ip}", "https": f"http://{ip}"}
     try:
-        r = s.head(CHECK_URL, timeout=TIMEOUT_STRICT)
-        if r.status_code == 200:
-            return True
+        # We test against YouTube specifically because that's our target
+        r = requests.get("https://www.youtube.com", proxies=proxies, timeout=TIMEOUT_SEC)
+        if r.status_code == 200:
+            return True
     except:
         return False
     return False

-def refinery_loop():
+def worker_loop():
     while True:
+        print(f"\n[{datetime.now().strftime('%H:%M')}] ♻️ Starting Validation Cycle...")

+        # 1. RE-VALIDATE EXISTING (Keep the good ones)
+        current_list = proxy_storage["valid_proxies"]
+        still_good = []
+        for ip in current_list:
             if check_proxy(ip):
+                still_good.append(ip)
+            else:
+                print(f"   ❌ Dropped dead proxy: {ip}")
+
+        proxy_storage["valid_proxies"] = still_good
+
+        # 2. FILL THE POOL (If we have less than 10)
+        if len(proxy_storage["valid_proxies"]) < TARGET_COUNT:
+            needed = TARGET_COUNT - len(proxy_storage["valid_proxies"])
+            print(f"   📉 Pool low. Need {needed} more. Fetching fresh list...")
+
+            # Fetch huge raw list
+            raw_proxies = []
             sources = [
                 "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt",
                 "https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt",
                 "https://api.proxyscrape.com/v2/?request=getproxies&protocol=http&timeout=5000&country=all&ssl=all&anonymity=all"
             ]
+            for s in sources:
                 try:
+                    r = requests.get(s, timeout=10)
+                    raw_proxies += r.text.strip().split("\n")
                 except: pass

+            # Shuffle to get random ones
+            random.shuffle(raw_proxies)
+
+            # Test until we hit 10
+            for ip in raw_proxies:
                 ip = ip.strip()
+                if not ip or ip in proxy_storage["valid_proxies"]: continue
+
+                # Stop if we hit our target
+                if len(proxy_storage["valid_proxies"]) >= TARGET_COUNT:
+                    break
+
+                print(f"   Testing {ip}...", end="\r")
+                if check_proxy(ip):
+                    print(f"   ✅ FOUND NEW: {ip}")
+                    proxy_storage["valid_proxies"].append(ip)
+
+        proxy_storage["last_updated"] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+        print(f"   😴 Sleeping for {CHECK_INTERVAL/60} mins. Current Pool: {len(proxy_storage['valid_proxies'])}")
+
+        # 3. SLEEP
+        time.sleep(CHECK_INTERVAL)

-threading.Thread(target=refinery_loop, daemon=True).start()
+# Start Background Thread
+threading.Thread(target=worker_loop, daemon=True).start()

 # ==============================================================================
-# 🌐 API ENDPOINT
+# 🌐 API ENDPOINT (FIXED)
 # ==============================================================================
-def serve_proxies():
+def get_proxies_api():
+    """Main App calls this to get the list"""
+    return proxy_storage["valid_proxies"], f"Updated: {proxy_storage['last_updated']}"

 with gr.Blocks() as app:
+    gr.Markdown("## 📦 Proxy Engine (Backend)")
+
     with gr.Row():
+        json_out = gr.JSON(label="Live Proxy Pool")
+        status_out = gr.Textbox(label="Last Update")
+
+    refresh_btn = gr.Button("Refresh View")
+
+    # --- THIS IS THE FIX: We added api_name="get_proxies" ---
+    refresh_btn.click(get_proxies_api, outputs=[json_out, status_out], api_name="get_proxies")
+
-    app.load(serve_proxies, outputs=[out_json, out_status])
+    # Load immediately on open
+    app.load(get_proxies_api, outputs=[json_out, status_out])

 app.launch()
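
With api_name="get_proxies" set on the click handler, Gradio exposes the endpoint as /get_proxies, so the main app can fetch the pool programmatically instead of scraping the UI. A minimal consumer sketch using gradio_client — the Space id "your-username/proxy-engine" is a placeholder, not taken from this commit:

    # Hypothetical consumer of the /get_proxies endpoint (Space id is a placeholder)
    from gradio_client import Client

    client = Client("your-username/proxy-engine")
    proxies, status = client.predict(api_name="/get_proxies")
    print(status)       # e.g. "Updated: 2025-01-01 12:00:00"
    print(proxies[:3])  # first few "ip:port" strings from the live pool

Because get_proxies_api takes no inputs, predict() is called with only the api_name; the two returned values map to the JSON and Textbox outputs in order.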