use anyhow::Result;
use futures::stream::{self, StreamExt};
use reqwest::{header, Client};
use std::fs::{File, OpenOptions};
use std::io::{BufRead, BufReader, Write};
use std::sync::atomic::{AtomicU64, AtomicUsize, Ordering};
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::Semaphore;

const CONCURRENT: usize = 1000; // MAXED
const TARGET_BYTES: u64 = 300_000_000_000;
const OUTPUT: &str = "/workspace/crawled";
const SEEDS: &str = "/workspace/seeds_2m.txt";

static TOTAL_BYTES: AtomicU64 = AtomicU64::new(0);
static PAGES: AtomicUsize = AtomicUsize::new(0);
static ERRORS: AtomicUsize = AtomicUsize::new(0);

#[tokio::main]
async fn main() -> Result<()> {
    std::fs::create_dir_all(OUTPUT)?;

    let mut headers = header::HeaderMap::new();
    headers.insert(header::ACCEPT, "text/html,*/*".parse()?);
    headers.insert(header::ACCEPT_LANGUAGE, "en-US,en;q=0.9".parse()?);
    // Deliberately NOT setting ACCEPT_ENCODING by hand: reqwest only
    // auto-decompresses a response when it added the Accept-Encoding header
    // itself (gzip feature enabled), so setting it manually would hand us raw
    // gzip bytes from resp.text().
    headers.insert(header::CACHE_CONTROL, "no-cache".parse()?);

    let client = Client::builder()
        .timeout(Duration::from_secs(10))
        .connect_timeout(Duration::from_secs(5))
        .user_agent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36")
        .default_headers(headers)
        .pool_max_idle_per_host(200)
        .pool_idle_timeout(Duration::from_secs(30))
        .redirect(reqwest::redirect::Policy::limited(3))
        .danger_accept_invalid_certs(true)
        .build()?;

    println!("🔥 AGGRESSIVE CRAWLER - 1000 concurrent, NO DELAYS");
    println!("Target: 300GB | Seeds: 2M");

    let file = File::open(SEEDS)?;
    let seeds: Vec<String> = BufReader::new(file).lines().filter_map(|l| l.ok()).collect();
    println!("Loaded {} seeds - FIRING", seeds.len());

    let sem = Arc::new(Semaphore::new(CONCURRENT));
    let client = Arc::new(client);

    // Status reporter: prints progress, throughput, and ETA every 15s.
    let start = std::time::Instant::now();
    tokio::spawn(async move {
        loop {
            tokio::time::sleep(Duration::from_secs(15)).await;
            let bytes = TOTAL_BYTES.load(Ordering::Relaxed);
            let pages = PAGES.load(Ordering::Relaxed);
            let errs = ERRORS.load(Ordering::Relaxed);
            let elapsed = start.elapsed().as_secs_f64();
            let rate = bytes as f64 / elapsed / 1_000_000.0; // MB/s
            let pct = 100.0 * bytes as f64 / TARGET_BYTES as f64;
            // saturating_sub: bytes can overshoot TARGET_BYTES before workers
            // notice and stop, and u64 subtraction would panic in debug builds.
            let remaining = TARGET_BYTES.saturating_sub(bytes);
            let eta_h = if rate > 0.0 {
                remaining as f64 / rate / 1_000_000.0 / 3600.0
            } else {
                999.0
            };
            println!(
                "[{:.0}s] {:.2}% | {:.2} GB | {} pg | {} err | {:.1} MB/s | ETA {:.1}h",
                elapsed, pct, bytes as f64 / 1e9, pages, errs, rate, eta_h
            );
        }
    });

    // BLAST through all seeds.
    stream::iter(seeds)
        .for_each_concurrent(CONCURRENT, |url| {
            let client = client.clone();
            let sem = sem.clone();
            async move {
                if TOTAL_BYTES.load(Ordering::Relaxed) >= TARGET_BYTES {
                    return;
                }
                // Redundant with for_each_concurrent's own limit; kept as a hard cap.
                let _permit = sem.acquire().await.unwrap();
                match client.get(&url).send().await {
                    Ok(resp) if resp.status().is_success() => {
                        if let Ok(html) = resp.text().await {
                            let text = strip_html(&html);
                            // Only keep pages with a useful amount of text.
                            if text.len() > 300 {
                                let n = PAGES.fetch_add(1, Ordering::Relaxed);
                                let path = format!("{}/p_{:08}.txt", OUTPUT, n);
                                if let Ok(mut f) =
                                    OpenOptions::new().create(true).write(true).open(&path)
                                {
                                    let content = format!("URL: {}\n\n{}", url, text);
                                    let _ = f.write_all(content.as_bytes());
                                    TOTAL_BYTES.fetch_add(content.len() as u64, Ordering::Relaxed);
                                }
                            }
                        }
                    }
                    _ => {
                        ERRORS.fetch_add(1, Ordering::Relaxed);
                    }
                }
            }
        })
        .await;

    println!("✅ DONE: {} GB", TOTAL_BYTES.load(Ordering::Relaxed) / 1_000_000_000);
    Ok(())
}
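/// Crude byte-level HTML-to-text pass: drops everything inside tags, skips
/// <script>/<style> bodies, and collapses whitespace runs to single spaces.
/// Good enough for bulk text harvesting; it is not a real HTML parser.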
fn strip_html(html: &str) -> String {
    let mut out: Vec<u8> = Vec::with_capacity(html.len() / 4);
    let mut in_tag = false;
    // 0 = normal text, 1 = inside <script>...</script>, 2 = inside <style>...</style>.
    let mut skip = 0u8;
    let bytes = html.as_bytes();
    let len = bytes.len();
    let mut i = 0;
    while i < len {
        let c = bytes[i];
        match c {
            b'<' => {
                in_tag = true;
                // Check for script/style open/close tags at this position.
                if i + 7 < len {
                    let slice = &bytes[i..i + 8];
                    if slice[..7].eq_ignore_ascii_case(b"<script") {
                        skip = 1;
                    } else if slice[..6].eq_ignore_ascii_case(b"<style") {
                        skip = 2;
                    } else if skip == 1 && slice.eq_ignore_ascii_case(b"</script") {
                        skip = 0;
                    } else if skip == 2 && slice[..7].eq_ignore_ascii_case(b"</style") {
                        skip = 0;
                    }
                }
            }
            b'>' => {
                in_tag = false;
            }
            _ if !in_tag && skip == 0 => {
                if c.is_ascii_whitespace() {
                    // Collapse whitespace runs to one space; never lead with one.
                    if !out.is_empty() && !out.ends_with(&[b' ']) {
                        out.push(b' ');
                    }
                } else {
                    // Push the raw byte so multi-byte UTF-8 sequences survive
                    // intact (casting u8 -> char would mangle non-ASCII text).
                    out.push(c);
                }
            }
            _ => {}
        }
        i += 1;
    }
    String::from_utf8_lossy(&out).into_owned()
}
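
// A minimal sanity check for strip_html, added for illustration (not part of
// the original crawler); the expected output assumes the whitespace-collapsing
// behavior above. Run with `cargo test`.
#[cfg(test)]
mod tests {
    use super::strip_html;

    #[test]
    fn drops_tags_scripts_and_styles() {
        let html = "<html><head><style>body { color: red; }</style></head>\
                    <body><p>Hello,\n  world!</p><script>var x = 1;</script></body></html>";
        assert_eq!(strip_html(html), "Hello, world!");
    }
}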