lexicalspace committed
Commit f4e1cee · verified · 1 Parent(s): 5bcf3b1

Update app.py

Files changed (1):
  1. app.py +50 -73
app.py CHANGED
@@ -3,116 +3,93 @@ import requests
  import threading
  import time
  import random
- from datetime import datetime

  # ==============================================================================
  # ⚙️ CONFIGURATION
  # ==============================================================================
- TARGET_COUNT = 20      # We want exactly 20 working proxies
- TIMEOUT_SEC = 5        # Reject if slower than 5 seconds
- CHECK_INTERVAL = 300   # Re-check every 5 minutes (300 seconds)

- # Shared Memory (The "Live File")
  proxy_storage = {
-     "valid_proxies": [],
-     "last_updated": "Not yet started"
  }

  # ==============================================================================
- # 🕵️‍♂️ PROXY WORKER (Background Thread)
  # ==============================================================================
  def check_proxy(ip):
-     """Returns True if proxy connects to YouTube in < 5 seconds."""
      proxies = {"http": f"http://{ip}", "https": f"http://{ip}"}
      try:
-         # We test against YouTube specifically because that's our target
-         r = requests.get("https://www.youtube.com", proxies=proxies, timeout=TIMEOUT_SEC)
-         if r.status_code == 200:
-             return True
      except:
          return False
      return False

- def worker_loop():
      while True:
-         print(f"\n[{datetime.now().strftime('%H:%M')}] ♻️ Starting Validation Cycle...")

-         # 1. RE-VALIDATE EXISTING (Keep the good ones)
-         current_list = proxy_storage["valid_proxies"]
-         still_good = []
-         for ip in current_list:
              if check_proxy(ip):
-                 still_good.append(ip)
-             else:
-                 print(f"   ❌ Dropped dead proxy: {ip}")
-
-         proxy_storage["valid_proxies"] = still_good
-
-         # 2. FILL THE POOL (If we have fewer than TARGET_COUNT)
-         if len(proxy_storage["valid_proxies"]) < TARGET_COUNT:
-             needed = TARGET_COUNT - len(proxy_storage["valid_proxies"])
-             print(f"   📉 Pool low. Need {needed} more. Fetching fresh list...")

-             # Fetch huge raw list
-             raw_proxies = []
              sources = [
                  "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt",
                  "https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt",
                  "https://api.proxyscrape.com/v2/?request=getproxies&protocol=http&timeout=5000&country=all&ssl=all&anonymity=all"
              ]
-             for s in sources:
                  try:
-                     r = requests.get(s, timeout=10)
-                     raw_proxies += r.text.strip().split("\n")
                  except: pass

-             # Shuffle to get random ones
-             random.shuffle(raw_proxies)
-
-             # Test until we hit TARGET_COUNT
-             for ip in raw_proxies:
-                 ip = ip.strip()
-                 if not ip or ip in proxy_storage["valid_proxies"]: continue

-                 # Stop if we hit our target
-                 if len(proxy_storage["valid_proxies"]) >= TARGET_COUNT:
-                     break
-
-                 print(f"   Testing {ip}...", end="\r")
-                 if check_proxy(ip):
-                     print(f"   ✅ FOUND NEW: {ip}")
-                     proxy_storage["valid_proxies"].append(ip)
-
-         proxy_storage["last_updated"] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
-         print(f"   💤 Sleeping for {CHECK_INTERVAL/60} mins. Current Pool: {len(proxy_storage['valid_proxies'])}")
-
-         # 3. SLEEP
-         time.sleep(CHECK_INTERVAL)

- # Start Background Thread
- threading.Thread(target=worker_loop, daemon=True).start()

  # ==============================================================================
- # 🔌 API ENDPOINT (FIXED)
  # ==============================================================================
- def get_proxies_api():
-     """Main App calls this to get the list"""
-     return proxy_storage["valid_proxies"], f"Updated: {proxy_storage['last_updated']}"

  with gr.Blocks() as app:
-     gr.Markdown("## 🚦 Proxy Engine (Backend)")
-
      with gr.Row():
-         json_out = gr.JSON(label="Live Proxy Pool")
-         status_out = gr.Textbox(label="Last Update")
-
-     refresh_btn = gr.Button("Refresh View")
-
-     # --- THIS IS THE FIX: We added api_name="get_proxies" ---
-     refresh_btn.click(get_proxies_api, outputs=[json_out, status_out], api_name="get_proxies")

-     # Load immediately on open
-     app.load(get_proxies_api, outputs=[json_out, status_out])

  app.launch()
 
  import threading
  import time
  import random

  # ==============================================================================
  # ⚙️ CONFIGURATION
  # ==============================================================================
+ TARGET_POOL_SIZE = 15                # Keep 15 good proxies ready
+ TIMEOUT_STRICT = 5                   # Reject anything slower than 5 seconds
+ CHECK_URL = "https://m.youtube.com"  # We check against YouTube Mobile (faster)

  proxy_storage = {
+     "pool": [],
+     "last_update": "Initializing..."
  }

  # ==============================================================================
+ # 🕵️‍♂️ REFINERY WORKER
  # ==============================================================================
  def check_proxy(ip):
+     """Strictly checks if proxy can load YouTube."""
      proxies = {"http": f"http://{ip}", "https": f"http://{ip}"}
      try:
+         # We use a Session for speed
+         with requests.Session() as s:
+             s.proxies.update(proxies)
+             # HEAD request is faster than GET
+             r = s.head(CHECK_URL, timeout=TIMEOUT_STRICT)
+             if r.status_code == 200:
+                 return True
      except:
          return False
      return False

+ def refinery_loop():
      while True:
+         print(f"♻️ Refining Pool... Current: {len(proxy_storage['pool'])}")

+         # 1. Clean existing pool (Re-check)
+         valid_proxies = []
+         for ip in proxy_storage['pool']:
              if check_proxy(ip):
+                 valid_proxies.append(ip)
+         proxy_storage['pool'] = valid_proxies
+
+         # 2. Refill if needed
+         if len(proxy_storage['pool']) < TARGET_POOL_SIZE:
+             print("   📉 Low fuel. Fetching fresh proxies...")
+             raw_list = []
              sources = [
                  "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt",
                  "https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt",
                  "https://api.proxyscrape.com/v2/?request=getproxies&protocol=http&timeout=5000&country=all&ssl=all&anonymity=all"
              ]
+             for url in sources:
                  try:
+                     r = requests.get(url, timeout=10)
+                     raw_list += r.text.strip().split("\n")
                  except: pass

+             # Shuffle and Test
+             random.shuffle(raw_list)
+             for ip in raw_list:
+                 if len(proxy_storage['pool']) >= TARGET_POOL_SIZE: break

+                 ip = ip.strip()
+                 if ip and ip not in proxy_storage['pool']:
+                     if check_proxy(ip):
+                         print(f"   ✅ Verified: {ip}")
+                         proxy_storage['pool'].append(ip)

+         proxy_storage['last_update'] = time.strftime("%H:%M:%S")
+         time.sleep(120)  # Run every 2 minutes for freshness

+ threading.Thread(target=refinery_loop, daemon=True).start()

  # ==============================================================================
+ # 🔌 API ENDPOINT
  # ==============================================================================
+ def serve_proxies():
+     return proxy_storage['pool'], f"Last Refined: {proxy_storage['last_update']}"

  with gr.Blocks() as app:
+     gr.Markdown("## 🏭 Proxy Refinery")
      with gr.Row():
+         out_json = gr.JSON(label="Active High-Speed Pool")
+         out_status = gr.Textbox(label="Status")

+     btn = gr.Button("Refresh")
+     btn.click(serve_proxies, outputs=[out_json, out_status], api_name="get_proxies")
+     app.load(serve_proxies, outputs=[out_json, out_status])

  app.launch()
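
Because the click handler is registered with api_name="get_proxies", Gradio also exposes it as a named API endpoint (/get_proxies). A minimal consumer sketch using gradio_client, assuming the Space is published as "lexicalspace/proxy-refinery" (a hypothetical ID; substitute the real Space name):

# Sketch: pull the live pool through the named endpoint.
# SPACE_ID is a hypothetical placeholder, not taken from this repo.
from gradio_client import Client

SPACE_ID = "lexicalspace/proxy-refinery"  # hypothetical; use the real Space ID

client = Client(SPACE_ID)

# serve_proxies takes no inputs and has two outputs (JSON pool, status text),
# so predict() returns a two-element tuple.
pool, status = client.predict(api_name="/get_proxies")

print(status)                        # e.g. "Last Refined: 12:34:56"
print(f"{len(pool)} proxies ready")

Since the refinery thread and the UI share proxy_storage, this call always returns whatever the background loop has most recently validated, with no page refresh required.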