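//! Minimal asynchronous web crawler: fetches pages concurrently, appends
//! each page to a JSONL file, and follows extracted links. A sketch of the
//! assumed Cargo dependencies (crate names are what the code uses; exact
//! versions and feature flags are illustrative): tokio (macros + multi-thread
//! runtime), reqwest, scraper, url, serde_json.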
use std::collections::HashSet;
use std::fs::OpenOptions;
use std::io::Write;
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};

use reqwest::Client;
use scraper::{Html, Selector};
use tokio::sync::Semaphore;
use url::Url;

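// Crawl-wide settings: at most MAX_CONCURRENT fetches in flight; pages are
// appended to OUTPUT_FILE as one JSON object per line (JSONL).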
const MAX_CONCURRENT: usize = 500;
const OUTPUT_FILE: &str = "/workspace/crawl.jsonl";

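// Entry point: seed the frontier, then loop forever dispatching fetch tasks.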
#[tokio::main]
async fn main() {
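    // Shared HTTP client: 10 s per-request timeout, fixed User-Agent string.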
    let client = Client::builder()
        .timeout(Duration::from_secs(10))
        .user_agent("Mozilla/5.0 (compatible; Googlebot/2.1)")
        .build()
        .unwrap();

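    // Shared crawl state. std::sync::Mutex is fine here because no guard is
    // ever held across an .await point.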
    let visited: Arc<Mutex<HashSet<String>>> = Arc::new(Mutex::new(HashSet::new()));
    let queue: Arc<Mutex<Vec<String>>> = Arc::new(Mutex::new(Vec::new()));
    let file = Arc::new(Mutex::new(
        OpenOptions::new()
            .create(true)
            .append(true)
            .open(OUTPUT_FILE)
            .unwrap(),
    ));
    let semaphore = Arc::new(Semaphore::new(MAX_CONCURRENT));

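    // Seed URLs that bootstrap the crawl frontier.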
    let seeds = vec![
        "https://en.wikipedia.org",
        "https://www.bbc.com",
        "https://arxiv.org",
        "https://www.nature.com",
        "https://www.theguardian.com",
        "https://reuters.com",
        "https://www.npr.org",
        "https://www.sciencedaily.com",
        "https://arstechnica.com",
    ];

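    // Prime the queue before the dispatch loop starts.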
    {
        let mut q = queue.lock().unwrap();
        for seed in seeds {
            q.push(seed.to_string());
        }
    }

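    // Wall-clock start plus page/byte counters for the progress report.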
    let start = Instant::now();
    let pages = Arc::new(Mutex::new(0u64));
    let bytes = Arc::new(Mutex::new(0u64));

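    // Dispatch loop: pop a URL (Vec::pop is LIFO, so the crawl leans
    // depth-first), skip anything already visited, then spawn a fetch task.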
    loop {
        let url = {
            let mut q = queue.lock().unwrap();
            q.pop()
        };

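        // Nothing queued right now: sleep briefly and let in-flight tasks
        // discover more links.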
        let url = match url {
            Some(u) => u,
            None => {
                tokio::time::sleep(Duration::from_millis(100)).await;
                continue;
            }
        };

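        // Mark the URL visited before fetching so no other iteration can
        // pick it up again.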
        {
            let mut v = visited.lock().unwrap();
            if v.contains(&url) {
                continue;
            }
            v.insert(url.clone());
        }

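        // Waits here whenever MAX_CONCURRENT tasks are already in flight.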
        let permit = semaphore.clone().acquire_owned().await.unwrap();
        let client = client.clone();
        let queue = queue.clone();
        let file = file.clone();
        let pages = pages.clone();
        let bytes = bytes.clone();
        let visited = visited.clone();

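        // Each task holds its semaphore permit for its whole lifetime;
        // dropping `_permit` when the task ends releases the slot.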
        tokio::spawn(async move {
            let _permit = permit;

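            // Fetch the page; network or decode errors simply drop the URL.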
            if let Ok(resp) = client.get(&url).send().await {
                if let Ok(text) = resp.text().await {
                    let text_len = text.len();

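                    // Persist the page as one JSONL record; length / 4 is a
                    // rough bytes-per-token heuristic for the estimate.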
                    let json = serde_json::json!({
                        "url": url,
                        "text": text,
                        "tokens_est": text_len / 4
                    });

                    {
                        let mut f = file.lock().unwrap();
                        writeln!(f, "{}", json).ok();
                    }

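                    // Parse the HTML and collect outgoing links, resolving
                    // relative hrefs against the page URL; only http(s)
                    // links are kept.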
                    let doc = Html::parse_document(&text);
                    let selector = Selector::parse("a[href]").unwrap();
                    let base = Url::parse(&url).ok();

                    let mut new_urls = Vec::new();
                    for elem in doc.select(&selector) {
                        if let Some(href) = elem.value().attr("href") {
                            if let Some(base) = &base {
                                if let Ok(abs) = base.join(href) {
                                    let abs_str = abs.to_string();
                                    if abs_str.starts_with("http") {
                                        new_urls.push(abs_str);
                                    }
                                }
                            }
                        }
                    }

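                    // Enqueue unseen links; the 5 M cap keeps the frontier's
                    // memory use bounded.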
                    {
                        let v = visited.lock().unwrap();
                        let mut q = queue.lock().unwrap();
                        for u in new_urls {
                            if !v.contains(&u) && q.len() < 5_000_000 {
                                q.push(u);
                            }
                        }
                    }

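                    // Bump the global counters and print a progress line
                    // every 100 pages.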
                    {
                        let mut p = pages.lock().unwrap();
                        let mut b = bytes.lock().unwrap();
                        *p += 1;
                        *b += text_len as u64;

                        if *p % 100 == 0 {
                            let elapsed = start.elapsed().as_secs();
                            let mb = *b / 1_048_576;
                            let qlen = queue.lock().unwrap().len();
                            let vlen = visited.lock().unwrap().len();
                            eprintln!(
                                "📊 {}s | {} pages | {} MB | queue: {} | visited: {}",
                                elapsed, *p, mb, qlen, vlen
                            );
                        }
                    }
                }
            }
        });
    }
}