# emergency / app.py
import gradio as gr
import requests
import threading
import time
import random
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor, as_completed
# ==============================================================================
# ⚙️ CONFIGURATION (SAFE MODE)
# ==============================================================================
TARGET_COUNT = 200   # Stop a scan early once this many working proxies are collected
TIMEOUT_SEC = 5      # Per-proxy timeout: anything slower is treated as dead
CHECK_INTERVAL = 30  # Seconds to sleep between scan cycles
MAX_THREADS = 40 # ⚠️ REDUCED to 40. Start small. If it works, try 80.
# Shared Memory
proxy_storage = {
    "valid_proxies": [],
    "last_updated": "Waiting for startup..."
}
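# NOTE: no lock is needed here: the worker publishes results by replacing the
# whole list in a single assignment, and the API side only reads it, which is
# safe under CPython's GIL.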
# ==============================================================================
# 🕵️‍♂️ PROXY WORKER
# ==============================================================================
def check_proxy(ip):
    """Return ip if the proxy is valid and FAST (answers within TIMEOUT_SEC), else None."""
    proxies = {"http": f"http://{ip}", "https": f"http://{ip}"}
    try:
        r = requests.get("https://www.youtube.com", proxies=proxies, timeout=TIMEOUT_SEC)
        if r.status_code == 200:
            return ip
    except requests.RequestException:
        pass
    return None
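# Illustrative calls (hypothetical TEST-NET addresses, not executed at import time):
#
#     check_proxy("203.0.113.7:8080")  # -> "203.0.113.7:8080" if YouTube answers HTTP 200 via the proxy
#     check_proxy("203.0.113.9:3128")  # -> None on timeout, connection error, or non-200 status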
def worker_loop():
    # 🛑 Sleep 15s so Gradio can boot before the first scan competes for CPU.
    print("⏳ Worker waiting 15s for Gradio to boot...")
    time.sleep(15)
    while True:
        print(f"\n[{datetime.now().strftime('%H:%M:%S')}] ☒️ Starting Scan...")
        # 1. FETCH SOURCES
        raw_proxies = []
        sources = [
            "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt",
            "https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt",
            "https://api.proxyscrape.com/v2/?request=getproxies&protocol=http&timeout=5000&country=all&ssl=all&anonymity=all",
            "https://raw.githubusercontent.com/shiftytr/proxy-list/master/proxy.txt",
            "https://raw.githubusercontent.com/hookzof/socks5_list/master/proxy.txt",
            "https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list-raw.txt",
            "https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt",
            "https://raw.githubusercontent.com/jetkai/proxy-list/main/online-proxies/txt/proxies-http.txt"
        ]
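        # NOTE: hookzof/socks5_list serves SOCKS5 addresses; those entries will
        # simply fail the HTTP check in check_proxy rather than break the scan.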
print(" πŸ“₯ Downloading proxy lists...")
for s in sources:
try:
r = requests.get(s, timeout=5)
if r.status_code == 200:
raw_proxies += r.text.strip().split("\n")
except: pass
raw_proxies = list(set(raw_proxies))
random.shuffle(raw_proxies)
# 2. VALIDATE
check_list = proxy_storage["valid_proxies"] + raw_proxies
check_list = check_list[:2000] # Hard limit to save RAM
new_valid_pool = []
print(f" πŸš€ Scanning {len(check_list)} proxies with {MAX_THREADS} threads...")
with ThreadPoolExecutor(max_workers=MAX_THREADS) as executor:
future_to_ip = {executor.submit(check_proxy, ip): ip for ip in check_list}
for future in as_completed(future_to_ip):
result = future.result()
if result:
if result not in new_valid_pool:
new_valid_pool.append(result)
if len(new_valid_pool) >= TARGET_COUNT:
executor.shutdown(wait=False, cancel_futures=True)
break
proxy_storage["valid_proxies"] = new_valid_pool
proxy_storage["last_updated"] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print(f" βœ… Update Complete. Found {len(new_valid_pool)} proxies.")
time.sleep(CHECK_INTERVAL)
# Start Background Thread
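# (daemon=True: the scanner thread exits automatically when the process stops)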
threading.Thread(target=worker_loop, daemon=True).start()
# ==============================================================================
# 🔌 API ENDPOINT
# ==============================================================================
def get_proxies_api():
    return proxy_storage["valid_proxies"], f"Updated: {proxy_storage['last_updated']}"
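# Example return shape (illustrative values):
#   (["203.0.113.7:8080", "203.0.113.44:3128"], "Updated: 2025-01-01 12:00:00")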
with gr.Blocks() as app:
    gr.Markdown("## 🚦 Proxy Engine (Safe Mode)")
    with gr.Row():
        json_out = gr.JSON(label="Live Proxy Pool")
        status_out = gr.Textbox(label="Last Update")
    refresh_btn = gr.Button("Refresh View")
    refresh_btn.click(get_proxies_api, outputs=[json_out, status_out], api_name="get_proxies")
    app.load(get_proxies_api, outputs=[json_out, status_out])
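    # api_name="get_proxies" also exposes the click handler as a named endpoint
    # (/get_proxies), so the pool can be pulled programmatically; see the
    # client sketch at the bottom of this file.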
# Explicitly set server settings to ensure HF can find the app
app.launch(server_name="0.0.0.0", server_port=7860)
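# ==============================================================================
# 💡 CLIENT USAGE (SKETCH)
# ==============================================================================
# A minimal consumer using gradio_client, kept commented out since app.launch()
# above blocks and nothing after it runs. The Space id "lexicalspace/emergency"
# is an assumption based on this file's location; substitute your own.
#
#     from gradio_client import Client
#
#     client = Client("lexicalspace/emergency")  # assumed Space id
#     proxies, status = client.predict(api_name="/get_proxies")
#     print(status, "->", len(proxies), "proxies")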