|
|
import requests |
|
|
from bs4 import BeautifulSoup |
|
|
from urllib.parse import urlparse |
|
|
import os |
|
|
import aiohttp |
|
|
import asyncio |
|
|
import time |
|
|
# Wall-clock reference point; elapsed time is printed at the end of the script.
start = time.perf_counter()
|
|
def get_urls():
    """Fetch the site index page and return a list of absolute link URLs.

    Scrapes every ``<a>`` tag on https://hashir672.serv00.net/ and resolves
    its ``href`` against the base URL.

    Returns:
        list[str]: absolute URLs, one per anchor that actually has an href.

    Raises:
        requests.HTTPError: if the index page request fails.
    """
    # Local import keeps this fix self-contained; urljoin correctly resolves
    # relative, root-relative, and absolute hrefs (plain string concatenation
    # produced broken URLs for anything but bare relative paths).
    from urllib.parse import urljoin

    base = 'https://hashir672.serv00.net/'
    resp = requests.get(base, timeout=30)  # timeout: never hang forever
    resp.raise_for_status()  # fail loudly instead of parsing an error page

    soup = BeautifulSoup(resp.text, 'html.parser')

    urls = []
    for link in soup.find_all('a'):
        href = link.get('href')
        # <a> tags without an href return None; the original code crashed
        # here with a TypeError on string concatenation.
        if href:
            urls.append(urljoin(base, href))

    return urls
|
|
|
|
|
|
|
|
# Collect the full download list up front (synchronous, one page fetch).
urls = get_urls()
|
|
|
|
|
|
|
|
async def download_image(url):
    """Download *url* and save it into ./extract_member/.

    The local filename is the last path segment of the URL.

    Args:
        url: absolute URL of the file to fetch.

    NOTE(review): a new ClientSession per URL works but is wasteful; a shared
    session passed in from main() would reuse connections — left as-is to
    keep the call signature unchanged.
    """
    # Original code crashed with FileNotFoundError when the target directory
    # was missing ('os' was imported but never used — this was the intent).
    os.makedirs("./extract_member", exist_ok=True)

    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            body = await resp.read()

    # Write after the response is fully read; no need to hold the session
    # open while touching the filesystem.
    with open("./extract_member/" + url.split("/")[-1], 'wb') as f:
        f.write(body)
|
|
|
|
|
|
|
|
async def main():
    """Download every collected URL concurrently and wait for all to finish."""
    tasks = [download_image(target) for target in urls]
    await asyncio.gather(*tasks)
|
|
|
|
|
# Drive the whole concurrent download batch to completion.
asyncio.run(main())


# Report total wall-clock time (index fetch + all downloads), in seconds.
print(f"Total time: {time.perf_counter() - start}")
|
|
|
|
|
|
|
|
|
|
|
|