Upload src/main.rs with huggingface_hub
Browse files- src/main.rs +140 -0
src/main.rs
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
use anyhow::Result;
|
| 2 |
+
use reqwest::{Client, header};
|
| 3 |
+
use std::fs::{File, OpenOptions};
|
| 4 |
+
use std::io::{BufRead, BufReader, Write};
|
| 5 |
+
use std::sync::Arc;
|
| 6 |
+
use std::sync::atomic::{AtomicU64, AtomicUsize, Ordering};
|
| 7 |
+
use std::time::Duration;
|
| 8 |
+
use tokio::sync::Semaphore;
|
| 9 |
+
use futures::stream::{self, StreamExt};
|
| 10 |
+
|
| 11 |
+
// --- Crawl tuning -----------------------------------------------------------
const CONCURRENT: usize = 1000; // MAXED — upper bound on in-flight requests
const TARGET_BYTES: u64 = 300_000_000_000; // stop issuing requests after ~300 GB saved
const OUTPUT: &str = "/workspace/crawled"; // one .txt file per saved page
const SEEDS: &str = "/workspace/seeds_2m.txt"; // newline-separated seed URLs

// Global progress counters, updated lock-free from every worker task.
static TOTAL_BYTES: AtomicU64 = AtomicU64::new(0); // bytes written to disk so far
static PAGES: AtomicUsize = AtomicUsize::new(0); // pages saved; doubles as the file index
static ERRORS: AtomicUsize = AtomicUsize::new(0); // failed or non-2xx fetches
|
| 19 |
+
|
| 20 |
+
#[tokio::main]
|
| 21 |
+
async fn main() -> Result<()> {
|
| 22 |
+
std::fs::create_dir_all(OUTPUT)?;
|
| 23 |
+
|
| 24 |
+
let mut headers = header::HeaderMap::new();
|
| 25 |
+
headers.insert(header::ACCEPT, "text/html,*/*".parse()?);
|
| 26 |
+
headers.insert(header::ACCEPT_LANGUAGE, "en-US,en;q=0.9".parse()?);
|
| 27 |
+
headers.insert(header::ACCEPT_ENCODING, "gzip, deflate".parse()?);
|
| 28 |
+
headers.insert(header::CACHE_CONTROL, "no-cache".parse()?);
|
| 29 |
+
|
| 30 |
+
let client = Client::builder()
|
| 31 |
+
.timeout(Duration::from_secs(10))
|
| 32 |
+
.connect_timeout(Duration::from_secs(5))
|
| 33 |
+
.user_agent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36")
|
| 34 |
+
.default_headers(headers)
|
| 35 |
+
.pool_max_idle_per_host(200)
|
| 36 |
+
.pool_idle_timeout(Duration::from_secs(30))
|
| 37 |
+
.redirect(reqwest::redirect::Policy::limited(3))
|
| 38 |
+
.danger_accept_invalid_certs(true)
|
| 39 |
+
.build()?;
|
| 40 |
+
|
| 41 |
+
println!("🔥 AGGRESSIVE CRAWLER - 1000 concurrent, NO DELAYS");
|
| 42 |
+
println!("Target: 300GB | Seeds: 2M");
|
| 43 |
+
|
| 44 |
+
let file = File::open(SEEDS)?;
|
| 45 |
+
let seeds: Vec<String> = BufReader::new(file).lines().filter_map(|l| l.ok()).collect();
|
| 46 |
+
println!("Loaded {} seeds - FIRING", seeds.len());
|
| 47 |
+
|
| 48 |
+
let sem = Arc::new(Semaphore::new(CONCURRENT));
|
| 49 |
+
let client = Arc::new(client);
|
| 50 |
+
|
| 51 |
+
// Status thread
|
| 52 |
+
let start = std::time::Instant::now();
|
| 53 |
+
tokio::spawn(async move {
|
| 54 |
+
loop {
|
| 55 |
+
tokio::time::sleep(Duration::from_secs(15)).await;
|
| 56 |
+
let bytes = TOTAL_BYTES.load(Ordering::Relaxed);
|
| 57 |
+
let pages = PAGES.load(Ordering::Relaxed);
|
| 58 |
+
let errs = ERRORS.load(Ordering::Relaxed);
|
| 59 |
+
let elapsed = start.elapsed().as_secs_f64();
|
| 60 |
+
let rate = bytes as f64 / elapsed / 1_000_000.0;
|
| 61 |
+
let pct = 100.0 * bytes as f64 / TARGET_BYTES as f64;
|
| 62 |
+
let eta_h = if rate > 0.0 { (TARGET_BYTES - bytes) as f64 / rate / 1_000_000.0 / 3600.0 } else { 999.0 };
|
| 63 |
+
|
| 64 |
+
println!("[{:.0}s] {:.2}% | {:.2} GB | {} pg | {} err | {:.1} MB/s | ETA {:.1}h",
|
| 65 |
+
elapsed, pct, bytes as f64/1e9, pages, errs, rate, eta_h);
|
| 66 |
+
}
|
| 67 |
+
});
|
| 68 |
+
|
| 69 |
+
// BLAST through all seeds
|
| 70 |
+
stream::iter(seeds)
|
| 71 |
+
.for_each_concurrent(CONCURRENT, |url| {
|
| 72 |
+
let client = client.clone();
|
| 73 |
+
let sem = sem.clone();
|
| 74 |
+
|
| 75 |
+
async move {
|
| 76 |
+
if TOTAL_BYTES.load(Ordering::Relaxed) >= TARGET_BYTES { return; }
|
| 77 |
+
|
| 78 |
+
let _permit = sem.acquire().await.unwrap();
|
| 79 |
+
|
| 80 |
+
match client.get(&url).send().await {
|
| 81 |
+
Ok(resp) if resp.status().is_success() => {
|
| 82 |
+
if let Ok(html) = resp.text().await {
|
| 83 |
+
let text = strip_html(&html);
|
| 84 |
+
if text.len() > 300 {
|
| 85 |
+
let n = PAGES.fetch_add(1, Ordering::Relaxed);
|
| 86 |
+
let path = format!("{}/p_{:08}.txt", OUTPUT, n);
|
| 87 |
+
if let Ok(mut f) = OpenOptions::new().create(true).write(true).open(&path) {
|
| 88 |
+
let content = format!("URL: {}\n\n{}", url, text);
|
| 89 |
+
let _ = f.write_all(content.as_bytes());
|
| 90 |
+
TOTAL_BYTES.fetch_add(content.len() as u64, Ordering::Relaxed);
|
| 91 |
+
}
|
| 92 |
+
}
|
| 93 |
+
}
|
| 94 |
+
}
|
| 95 |
+
_ => { ERRORS.fetch_add(1, Ordering::Relaxed); }
|
| 96 |
+
}
|
| 97 |
+
}
|
| 98 |
+
})
|
| 99 |
+
.await;
|
| 100 |
+
|
| 101 |
+
println!("✅ DONE: {} GB", TOTAL_BYTES.load(Ordering::Relaxed) / 1_000_000_000);
|
| 102 |
+
Ok(())
|
| 103 |
+
}
|
| 104 |
+
|
| 105 |
+
/// Strips HTML markup from `html`, returning visible text with runs of
/// whitespace collapsed to single spaces.
///
/// Tag bodies are dropped, and the *contents* of `<script>`/`<style>`
/// elements are dropped entirely (case-insensitive). Leading whitespace is
/// suppressed; internal/trailing runs collapse to one space.
///
/// Fixes two defects in the previous byte-walking version:
/// - `<style>` detection compared against the 8-byte patterns `"<style>s"` /
///   `"<style s"` (a stray trailing `s`), so ordinary `<style>` blocks were
///   never skipped and raw CSS leaked into the output.
/// - `out.push(c as char)` reinterpreted UTF-8 continuation bytes as Latin-1
///   code points, garbling all non-ASCII text; iterating `char_indices`
///   preserves multi-byte characters intact.
fn strip_html(html: &str) -> String {
    /// True when `rest` begins with `tag` followed by `>` or a space
    /// (i.e. the tag name matches exactly, possibly with attributes).
    fn starts_with_tag(rest: &[u8], tag: &[u8]) -> bool {
        rest.len() > tag.len()
            && rest[..tag.len()].eq_ignore_ascii_case(tag)
            && (rest[tag.len()] == b'>' || rest[tag.len()] == b' ')
    }

    let mut out = String::with_capacity(html.len() / 4);
    let bytes = html.as_bytes();
    let mut in_tag = false;
    // 0 = emitting text, 1 = inside <script>...</script>, 2 = inside <style>...</style>
    let mut skip: u8 = 0;

    for (i, c) in html.char_indices() {
        match c {
            '<' => {
                in_tag = true;
                // Tag names are ASCII, so byte comparison at a char boundary is safe.
                let rest = &bytes[i..];
                if skip == 0 {
                    if starts_with_tag(rest, b"<script") {
                        skip = 1;
                    } else if starts_with_tag(rest, b"<style") {
                        skip = 2;
                    }
                } else if skip == 1 && rest.len() >= 9 && rest[..9].eq_ignore_ascii_case(b"</script>") {
                    skip = 0;
                } else if skip == 2 && rest.len() >= 8 && rest[..8].eq_ignore_ascii_case(b"</style>") {
                    skip = 0;
                }
            }
            '>' => in_tag = false,
            _ if !in_tag && skip == 0 => {
                if c.is_whitespace() {
                    // Collapse whitespace runs; suppress a leading separator.
                    if !out.is_empty() && !out.ends_with(' ') {
                        out.push(' ');
                    }
                } else {
                    out.push(c);
                }
            }
            _ => {} // inside a tag or a skipped script/style body
        }
    }
    out
}
|