# GPRXY / app.py
# (Hugging Face Space header: uploaded by lelafav502, commit f0712ca "Create app.py", verified)
import requests
from bs4 import BeautifulSoup
import concurrent.futures
import gradio as gr
# Page that lists proxies in an HTML table (type / ip / port columns).
URL = "https://tomcat1235.nyc.mn/proxy_list"
# Per-proxy connect/read timeout in seconds when testing a proxy.
TIMEOUT = 8
# File the working ip:port list is written to (offered as a download in the UI).
OUTPUT_FILE = "work.txt"
def fetch_proxies():
    """Download the proxy-list page and parse it into proxy dicts.

    Returns:
        list[dict]: entries with string keys "type", "ip", "port".

    Raises:
        requests.HTTPError: if the source page responds with an error status.
        requests.RequestException: on connection problems / timeout.
    """
    response = requests.get(URL, timeout=15)
    response.raise_for_status()
    return _parse_proxies(response.text)


def _parse_proxies(html):
    """Parse an HTML <table> of proxies into a list of dicts.

    Expects <tr> rows whose first three <td> cells are type, ip and port.
    Rows that do not look like a dotted-quad IP with a numeric port are
    skipped. (Replaces the original bare ``except:`` which silently hid
    any parsing problem.)
    """
    soup = BeautifulSoup(html, "html.parser")
    proxies = []
    for row in soup.find_all("tr"):
        cols = row.find_all("td")
        if len(cols) < 3:
            continue
        proxy_type = cols[0].get_text(strip=True).lower()
        ip = cols[1].get_text(strip=True)
        port = cols[2].get_text(strip=True)
        # Cheap sanity filter: dotted-quad shape and all-digit port.
        if ip.count(".") == 3 and port.isdigit():
            proxies.append({"type": proxy_type, "ip": ip, "port": port})
    return proxies
def test_proxy(proxy):
    """Probe one proxy by requesting httpbin.org through it.

    Args:
        proxy: dict with "type", "ip", "port" keys (as produced by
            ``fetch_proxies``).

    Returns:
        str | None: "ip:port" (no scheme prefix) if the proxy answered
        with HTTP 200 within ``TIMEOUT`` seconds, otherwise None.
    """
    ip = proxy["ip"]
    port = proxy["port"]
    proxy_url = f"{proxy['type']}://{ip}:{port}"
    proxies_dict = {
        "http": proxy_url,
        "https": proxy_url,
    }
    try:
        r = requests.get(
            "http://httpbin.org/ip",
            proxies=proxies_dict,
            timeout=TIMEOUT,
        )
    # Narrowed from a bare ``except:`` — only network/proxy failures mean
    # "proxy is dead"; anything else (e.g. KeyboardInterrupt) propagates.
    except requests.RequestException:
        return None
    if r.status_code == 200:
        return f"{ip}:{port}"  # ✅ Return without socks5://
    return None
def check_proxies():
    """Fetch the proxy list, test every proxy concurrently, save the winners.

    Returns:
        tuple[str, str | None]: (newline-joined working "ip:port" entries,
        path of the saved file) on success, or (human-readable message,
        None) when fetching failed or nothing worked — the shape the two
        Gradio outputs expect.
    """
    try:
        proxies = fetch_proxies()
    except Exception as e:  # UI boundary: surface the failure as text, don't crash
        return f"Error fetching proxies:\n{e}", None
    # Fan out the per-proxy probes; each test blocks on network I/O for up
    # to TIMEOUT seconds, so threads overlap the waits.
    with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor:
        working = [result for result in executor.map(test_proxy, proxies) if result]
    if not working:
        return "No working proxies found.", None
    joined = "\n".join(working)
    # Save to file for the download widget (explicit encoding for portability).
    with open(OUTPUT_FILE, "w", encoding="utf-8") as f:
        f.write(joined + "\n")
    return joined, OUTPUT_FILE
# --- Gradio interface: one button that fills a textbox and a download file ---
with gr.Blocks() as app:
    gr.Markdown("# Proxy Checker")
    gr.Markdown("Fetch and test proxies. Returns only working IP:PORT (no socks5:// prefix).")
    results_box = gr.Textbox(label="Working Proxies", lines=15)
    file_out = gr.File(label="Download working proxies")
    run_btn = gr.Button("Check Proxies")
    # check_proxies returns (text, file-path-or-None), matching the two outputs.
    run_btn.click(fn=check_proxies, outputs=[results_box, file_out])

app.launch()