# cry / app.py — Hugging Face Space application file
# (uploader: lelafav502, commit bab7b37; the original lines here were
#  non-Python residue scraped from the HF file-viewer page header)
import gradio as gr
import requests
import random
import threading
import time
import random
import urllib.parse
# Pool of target pages; one is picked at random on every loop iteration.
urls = [
    "https://ntmtmp.xyz/forme.php",
]
#session_id = random.randint(1000, 9999)
# Country codes passed as scrape.do's geoCode parameter. "us" is repeated
# many times so a uniform random.choice is heavily weighted toward US.
geos = ["us", "us", "us", "us", "us", "us", "uk", "ca", "ca", "uk", "us", "us", "us", "us", "us", "us", "us", "us", "us", "us"]
# NOTE(review): hard-coded API credential committed in source — move this to
# an environment variable / Space secret and rotate the exposed token.
API_TOKEN = "96841d8751464ad5bbbc99412b750740ab2dd989bf2"
# Background function for scraping
def start_scraping(loop_interval=10):
    """Start an endless background scraping loop and return immediately.

    Spawns a daemon thread that repeatedly: picks a random target URL and
    geo code, calls the scrape.do rendering API for that page, prints the
    first 500 characters of the response, then sleeps ``loop_interval``
    seconds before the next iteration.

    Args:
        loop_interval: Seconds to wait between iterations (Gradio's Number
            widget passes this in as a float).

    Returns:
        A short status string shown in the Gradio output box.
    """
    # URL-encoded JSON browser-automation script for scrape.do's
    # playWithBrowser parameter: a fixed sequence of Wait/Click/Scroll
    # steps executed on the rendered page. Kept byte-identical to the
    # original query string.
    play_script = "%5B%7B%22Action%22%3A%22Wait%22%2C%22Timeout%22%3A1000%7D%2C%7B%22Action%22%3A%22Click%22%2C%22Selector%22%3A%22%23accept-btn%22%7D%2C%7B%22Action%22%3A%22Click%22%2C%22Selector%22%3A%22button%22%7D%2C%7B%22Action%22%3A%22Wait%22%2C%22Timeout%22%3A1000%7D%2C%7B%22Action%22%3A%22Click%22%2C%22Selector%22%3A%22button%22%7D%2C%7B%22Action%22%3A%22Wait%22%2C%22Timeout%22%3A12000%7D%2C%7B%22Action%22%3A%22Click%22%2C%22Selector%22%3A%22%23button_id%22%7D%2C%7B%22Action%22%3A%22Wait%22%2C%22Timeout%22%3A10000%7D%2C%7B%22Action%22%3A%22WaitSelector%22%2C%22WaitSelector%22%3A%22%23btn%22%2C%22Timeout%22%3A1000%7D%2C%7B%22Action%22%3A%22ScrollX%22%2C%22Value%22%3A100%7D%2C%7B%22Action%22%3A%22ScrollY%22%2C%22Value%22%3A100%7D%2C%7B%22Action%22%3A%22Wait%22%2C%22Timeout%22%3A11000%7D%2C%7B%22Action%22%3A%22Click%22%2C%22Selector%22%3A%22%23button_id%22%7D%2C%7B%22Action%22%3A%22Click%22%2C%22Selector%22%3A%22%23button_id%22%7D%2C%7B%22Action%22%3A%22Wait%22%2C%22Timeout%22%3A9000%7D%2C%7B%22Action%22%3A%22ScrollX%22%2C%22Value%22%3A100%7D%2C%7B%22Action%22%3A%22WaitSelector%22%2C%22WaitSelector%22%3A%22%23btn%22%2C%22Timeout%22%3A1000%7D%2C%7B%22Action%22%3A%22Wait%22%2C%22Timeout%22%3A7000%7D%5D"

    def scrape_loop():
        while True:
            target_url = random.choice(urls)
            encoded_url = urllib.parse.quote(target_url)
            geo = random.choice(geos)
            api_url = (
                f"http://api.scrape.do/?url={encoded_url}&token={API_TOKEN}"
                f"&super=true&geoCode={geo}&render=true&waitUntil=load"
                f"&customWait=4000&width=1920&height=1080&returnJSON=true"
                f"&blockResources=false&screenShot=true"
                f"&playWithBrowser={play_script}"
            )
            try:
                # Fix: the original call had no timeout, so one stalled
                # request could hang this loop forever. 120 s comfortably
                # covers the ~60 s of scripted waits plus rendering.
                response = requests.get(api_url, timeout=120)
                print(f"--- Geo: {geo} | URL: {target_url} ---")
                print(response.text[:500], "\n")  # Truncate to avoid huge output
            except Exception as e:
                # Best-effort loop: log and keep going on any request failure.
                print("Error:", e)
            time.sleep(loop_interval)  # Wait before next iteration

    threading.Thread(target=scrape_loop, daemon=True).start()
    return "Scraping started in background!"
# Gradio interface
# Gradio front end: one numeric field feeding start_scraping; the returned
# status string is rendered as plain text.
interval_field = gr.Number(label="Loop Interval (seconds)", value=10)

iface = gr.Interface(
    fn=start_scraping,
    inputs=interval_field,
    outputs="text",
    title="Background Web Scraper",
)

iface.launch()