# pixmo-points / download_pixmo.py
# Author: Jian Zhang
# (HuggingFace upload "Upload 8 files", commit a03ed61 verified)
import datasets
from hashlib import sha256
import requests
from pathlib import Path
import json
import multiprocessing
from concurrent.futures import ThreadPoolExecutor, as_completed
import threading
# Global lock shared by all worker threads; serialises writes to the
# image cache so two threads never write the same file concurrently.
lock = threading.Lock()
root_path = './Datasets/'
# Directory where downloaded images are cached, keyed by their sha256.
cache_dir = Path(root_path, "pixmo_images")
cache_dir.mkdir(parents=True, exist_ok=True)
# Load the dataset (downloads/caches it under root_path on first run).
data = datasets.load_dataset("allenai/pixmo-points", split="train", cache_dir=root_path)
print(f"Dataset size: {len(data)}")
len_data = len(data)
# Thread-pool sizing: I/O-bound workload, so oversubscribe the cores
# (4x CPU count) but cap at 64 to avoid hammering the image hosts.
# MAX_WORKERS = 12 # tunable; 5-20 suggested
cpu_cores = multiprocessing.cpu_count()
MAX_WORKERS = min(64, cpu_cores * 4)
print(f"CPU 核心数: {cpu_cores}, 线程池大小: {MAX_WORKERS}")
# Accumulators for dataset indices that downloaded OK / failed.
success_indices = []
failed_indices = []
def download_image(i, example, timeout=15):
    """Download one image and verify its SHA-256 checksum.

    Args:
        i: Dataset index of ``example`` (echoed back so the caller can
            record which indices succeeded or failed).
        example: Dataset row with ``points``, ``image_url`` and
            ``image_sha256`` fields.
        timeout: Per-request timeout in seconds.

    Returns:
        ``(i, None)`` on success (or if the file is already cached),
        ``(None, i)`` on any download/verification failure,
        ``(None, None)`` when the row is skipped (``len(points) != 1``).
    """
    # Only rows with exactly one point are of interest for this run.
    if len(example['points']) != 1:
        return None, None
    image_url = example["image_url"]
    # Images are cached under their sha256 so duplicates collapse to one file.
    image_path = cache_dir / f"{example['image_sha256']}.jpg"
    if image_path.exists():
        print(image_path, 'exists, skip')
        return i, None  # already downloaded in an earlier run
    try:
        response = requests.get(image_url, timeout=timeout)
        # Treat HTTP errors (404/500/...) as failures instead of silently
        # saving an error page; HTTPError is a RequestException subclass,
        # so it is caught by the handler below.
        response.raise_for_status()
        image_bytes = response.content
        # Verify the checksum BEFORE writing, so a corrupt download never
        # touches the cache and no racy delete is needed afterwards.
        byte_hash = sha256(image_bytes).hexdigest()
        if byte_hash != example["image_sha256"]:
            print(image_path, 'hash mismatch, delete')
            return None, i
        # Duplicate URLs share the same sha256 filename, so serialise
        # writes to avoid two threads writing the same file at once.
        with lock:
            with open(image_path, "wb") as f:
                f.write(image_bytes)
        print(image_path, 'save success!')
        return i, None
    except requests.exceptions.Timeout:
        print(f"Timeout while downloading {image_url}. Skipping...")
        return None, i
    except requests.exceptions.RequestException as e:
        print(f"Error downloading {image_url}: {e}")
        return None, i
completed_count = 0
# Only the first TOTAL_ITEMS rows are processed in this run; the tail of
# the dataset was handled by a previous run (see commented history below).
TOTAL_ITEMS = 1541083

# Fan out downloads across the thread pool.
with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
    future_to_index = {}
    # Submit indices in a strided order (step = MAX_WORKERS) so adjacent
    # rows — which often point at the same host — are not all fetched at
    # the same moment.
    for offset in range(MAX_WORKERS):
        for i in range(offset, TOTAL_ITEMS, MAX_WORKERS):
            future = executor.submit(download_image, i, data[i])
            future_to_index[future] = i
    # Collect results as they complete; progress is reported from the
    # main thread only, so no locking is needed on the counters/lists.
    for future in as_completed(future_to_index):
        success, failed = future.result()
        if success is not None:
            success_indices.append(success)
        if failed is not None:
            failed_indices.append(failed)
        completed_count += 1
        print(f"Progress: {completed_count}/{TOTAL_ITEMS} ({(completed_count / TOTAL_ITEMS) * 100:.3f}%)")

# Persist which indices succeeded / failed so a re-run can resume or retry.
with open("one_points_indices.json", "w") as f:
    json.dump(success_indices, f, indent=4)
with open("failed_indices.json", "w") as f:
    json.dump(failed_indices, f, indent=4)
print(f"Total successful downloads: {len(success_indices)}")
print(f"Total failed downloads: {len(failed_indices)}")
# import datasets
# from hashlib import sha256
# import requests
# import os
# from pathlib import Path
# from hashlib import sha256
# import requests
# from datasets import load_dataset
# import json
# # os.environ["HF_ENDPOINT"] = "https://hf-mirror.com"
# # export HF_ENDPOINT=https://hf-mirror.com
# root_path = './Datasets/'
# cache_dir = Path(root_path, "pixmo_images")
# cache_dir.mkdir(parents=True, exist_ok=True)
# # 加载数据集
# data = load_dataset("allenai/pixmo-points", split="train", cache_dir=root_path)
# print(len(data))
# len_data = len(data)
# def download_image(example, timeout=10):
# # print(example)
# points = example['points']
# len_points = len(points)
# if len_points > 1:
# return None
# if len_points ==0:
# return None
# image_url = example["image_url"]
# print(example['image_sha256'],len_points,points)
# image_filename = f"{example['image_sha256']}.jpg"
# image_path = cache_dir / image_filename
# # 如果图片未缓存,则下载
# if not image_path.exists():
# try:
# # 设置超时参数
# image_bytes = requests.get(image_url, timeout=timeout).content
# with open(image_path, "wb") as f:
# f.write(image_bytes)
# # 验证 SHA256
# byte_hash = sha256(image_bytes).hexdigest()
# assert byte_hash == example["image_sha256"], "SHA256 mismatch!"
# except requests.exceptions.Timeout:
# print(f"Timeout while downloading {image_url}. Skipping...")
# except Exception as e:
# print(f"Error downloading {image_url}: {e}")
# return True
# # 预下载所有图片(仅第一次运行需要)
# summ = 0
# success_indices = [] # 用于存储成功的 i 值
# for i, example in enumerate(data):
# print(i, len_data, f"{(i / len_data) * 100:.2f} %")
# s = download_image(example)
# if s:
# summ += 1
# success_indices.append(i) # 记录成功的 i
# # 将成功的 i 值保存到 JSON 文件
# with open("one_points_indices.json", "w") as f:
# json.dump(success_indices, f, indent=4)
# print("Total successful downloads:", summ)