from pathlib import Path
from typing import Optional

import requests
from bs4 import BeautifulSoup
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
|
|
|
|
def build_session(
    max_retries: int = 3,
    backoff_factor: int = 2,
    session: Optional[requests.Session] = None,
) -> requests.Session:
    """Build a requests session with automatic retries and a browser User-Agent.

    Args:
        max_retries (int, optional): Total number of retries per request.
            Defaults to 3.
        backoff_factor (int, optional): Exponential backoff factor between
            retries. Defaults to 2.
        session (requests.Session, optional): Existing session to configure;
            a new one is created when None. Defaults to None.

    Returns:
        requests.Session: The configured session (the same object that was
        passed in, when one was given).
    """
    session = session or requests.Session()
    retry = Retry(
        total=max_retries,
        backoff_factor=backoff_factor,
        # Without status_forcelist, Retry only retries connection errors;
        # also retry transient rate-limit / server-side failures.
        status_forcelist=(429, 500, 502, 503, 504),
    )
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    # Some servers reject requests that lack a browser-like User-Agent.
    session.headers.update({
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"
    })
    return session
|
|
|
|
def main():
    """Scrape the HMMT archive and download solution PDFs into ../raw/.

    Walks the archive index for links ending in a numeric contest id, then
    for each contest page downloads every PDF link whose anchor text
    mentions "solution", saving it as
    ``en-<contest_id>-<year>-<month>-<topic>-<name>``.
    """
    output_dir = Path(__file__).parent.parent / "raw"
    output_dir.mkdir(parents=True, exist_ok=True)

    base = 'https://www.hmmt.org'
    req_session = build_session()

    response = req_session.get(f'{base}/www/archive/problems', timeout=30)
    if response.status_code != 200:
        # Previously a failed index fetch was silently ignored.
        print('Failed to reach', f'{base}/www/archive/problems')
        return

    soup = BeautifulSoup(response.text, 'html.parser')
    tags = soup.find_all('a', href=lambda t: '/archive/' in t if t else False)

    urls = []
    for tag in tags:
        href = tag.get('href')
        try:
            # Keep only archive links whose last path segment is a numeric id.
            int(href.split('/')[-1])
        except ValueError:  # was a bare except: — catch only the parse failure
            continue
        urls.append(base + href)

    for url in urls:
        contest_id = url.split('/')[-1]  # renamed from `id` (shadowed builtin)
        response = req_session.get(url, timeout=30)
        if response.status_code != 200:
            print('Failed to reach', url)
            continue

        soup = BeautifulSoup(response.text, 'html.parser')
        tags = soup.find_all('a', href=lambda t: '.pdf' in t if t else False)

        links = []
        for tag in tags:
            if 'solution' not in tag.text.lower():
                continue
            href = tag.get('href')
            # Bug fix: lstrip('https:') strips a *character set*, not the
            # prefix. Prepend the scheme only for protocol-relative hrefs.
            links.append(href if href.startswith('https:') else 'https:' + href)

        for link in links:
            year, month, topic, name = link.split('/')[-4:]
            pdf = req_session.get(link, timeout=60)
            if pdf.status_code != 200:
                # Don't save an error page with a .pdf name.
                print('Failed to download', link)
                continue
            (output_dir / f'en-{contest_id}-{year}-{month}-{topic}-{name}').write_bytes(pdf.content)
|
|
|
|
# Script entry point: only run the scraper when executed directly.
if __name__ == '__main__':
    main()
|
|