use std::collections::HashSet;
use std::fs::OpenOptions;
use std::io::Write;
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};
use tokio::sync::Semaphore;
use reqwest::Client;
use scraper::{Html, Selector};
use url::Url;
const MAX_CONCURRENT: usize = 500;
const OUTPUT_FILE: &str = "/workspace/crawl.jsonl";
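
// Crawler layout: a shared work queue feeds URLs to spawned tasks, a HashSet
// deduplicates visited URLs, a Semaphore caps concurrent requests, and each
// fetched page is appended as one JSON line to OUTPUT_FILE.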
#[tokio::main]
async fn main() {
    let client = Client::builder()
        .timeout(Duration::from_secs(10))
        .user_agent("Mozilla/5.0 (compatible; Googlebot/2.1)")
        .build()
        .unwrap();
    let visited: Arc<Mutex<HashSet<String>>> = Arc::new(Mutex::new(HashSet::new()));
    let queue: Arc<Mutex<Vec<String>>> = Arc::new(Mutex::new(Vec::new()));
    let file = Arc::new(Mutex::new(
        OpenOptions::new().create(true).append(true).open(OUTPUT_FILE).unwrap(),
    ));
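    // Limit in-flight HTTP requests; each spawned task holds one permit until it finishes.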
    let semaphore = Arc::new(Semaphore::new(MAX_CONCURRENT));
    // Seed URLs
    let seeds = vec![
        "https://en.wikipedia.org", "https://www.bbc.com", "https://arxiv.org",
        "https://www.nature.com", "https://www.theguardian.com", "https://reuters.com",
        "https://www.npr.org", "https://www.sciencedaily.com", "https://arstechnica.com",
    ];
    {
        let mut q = queue.lock().unwrap();
        for seed in seeds {
            q.push(seed.to_string());
        }
    }
    let start = Instant::now();
    let pages = Arc::new(Mutex::new(0u64));
    let bytes = Arc::new(Mutex::new(0u64));
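    // Dispatcher loop: pop a URL, skip it if already seen, then spawn a fetch task.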
    loop {
        let url = {
            let mut q = queue.lock().unwrap();
            q.pop()
        };
        let url = match url {
            Some(u) => u,
            None => {
                tokio::time::sleep(Duration::from_millis(100)).await;
                continue;
            }
        };
        {
            let mut v = visited.lock().unwrap();
            if v.contains(&url) { continue; }
            v.insert(url.clone());
        }
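        // Take one permit before spawning so at most MAX_CONCURRENT fetches run at once;
        // acquire_owned().await suspends the dispatcher when no permits are free.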
        let permit = semaphore.clone().acquire_owned().await.unwrap();
        let client = client.clone();
        let queue = queue.clone();
        let file = file.clone();
        let pages = pages.clone();
        let bytes = bytes.clone();
        let visited = visited.clone();
        tokio::spawn(async move {
            let _permit = permit;
            if let Ok(resp) = client.get(&url).send().await {
                if let Ok(text) = resp.text().await {
                    let text_len = text.len();
                    // Write to file
                    let json = serde_json::json!({
                        "url": url,
                        "text": text,
                        "tokens_est": text_len / 4
                    });
                    {
                        let mut f = file.lock().unwrap();
                        writeln!(f, "{}", json).ok();
                    }
                    // Extract links
                    let doc = Html::parse_document(&text);
                    let selector = Selector::parse("a[href]").unwrap();
                    let base = Url::parse(&url).ok();
                    let mut new_urls = Vec::new();
                    for elem in doc.select(&selector) {
                        if let Some(href) = elem.value().attr("href") {
                            if let Some(base) = &base {
                                if let Ok(abs) = base.join(href) {
                                    let abs_str = abs.to_string();
                                    if abs_str.starts_with("http") {
                                        new_urls.push(abs_str);
                                    }
                                }
                            }
                        }
                    }
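                    // Enqueue newly discovered links, skipping URLs already visited
                    // and capping the queue at 5M entries.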
                    {
                        let v = visited.lock().unwrap();
                        let mut q = queue.lock().unwrap();
                        for u in new_urls {
                            if !v.contains(&u) && q.len() < 5_000_000 {
                                q.push(u);
                            }
                        }
                    }
                    // Update stats
                    {
                        let mut p = pages.lock().unwrap();
                        let mut b = bytes.lock().unwrap();
                        *p += 1;
                        *b += text_len as u64;
                        if *p % 100 == 0 {
                            let elapsed = start.elapsed().as_secs();
                            let mb = *b / 1_048_576;
                            let qlen = queue.lock().unwrap().len();
                            let vlen = visited.lock().unwrap().len();
                            eprintln!(
                                "📊 {}s | {} pages | {} MB | queue: {} | visited: {}",
                                elapsed, *p, mb, qlen, vlen
                            );
                        }
                    }
                }
            }
        });
    }
}