|
|
import argparse |
|
|
import glob |
|
|
import os |
|
|
from concurrent.futures import ThreadPoolExecutor, as_completed |
|
|
from PIL import Image |
|
|
from tqdm import tqdm |
|
|
|
|
|
|
|
|
def process_image(path, output_dir, scale_list, shortest_edge):
    """Create multi-scale versions of a single image.

    For a source image ``<name>.<ext>`` this writes ``len(scale_list) + 1``
    PNG files into ``output_dir``: ``<name>T<i>.png`` for each factor in
    ``scale_list``, plus a final ``<name>T<len(scale_list)>.png`` whose
    shortest edge is resized to ``shortest_edge`` pixels (aspect ratio
    preserved). All resizing uses LANCZOS resampling.

    Args:
        path (str): Path to the source image.
        output_dir (str): Directory where resized PNGs are written.
        scale_list (list[float]): Relative scale factors to apply.
        shortest_edge (int): Target length of the shortest edge for the
            final output variant.

    Returns:
        str: ``'skipped'`` if every expected output already exists,
        ``'processed'`` otherwise.
    """
    basename = os.path.splitext(os.path.basename(path))[0]

    # Fast path: if every expected output file is already on disk,
    # avoid opening/decoding the source image at all.
    output_files = [os.path.join(output_dir, f'{basename}T{idx}.png') for idx in range(len(scale_list) + 1)]
    if all(os.path.exists(f) for f in output_files):
        return 'skipped'

    # Use a context manager so the underlying file handle is released even
    # if a resize/save raises (the original leaked one handle per image,
    # which matters when a thread pool churns through thousands of files).
    with Image.open(path) as img:
        width, height = img.size

        # Fixed-ratio variants: one output per entry in scale_list.
        for idx, scale in enumerate(scale_list):
            output_path = os.path.join(output_dir, f'{basename}T{idx}.png')
            if not os.path.exists(output_path):
                rlt = img.resize((int(width * scale), int(height * scale)), resample=Image.LANCZOS)
                rlt.save(output_path)

        # Final variant: resize so the shortest edge equals `shortest_edge`.
        output_path = os.path.join(output_dir, f'{basename}T{len(scale_list)}.png')
        if not os.path.exists(output_path):
            if width < height:
                # Width is the shortest edge; scale height proportionally.
                new_width = shortest_edge
                new_height = int(new_width * (height / width))
            else:
                # Height is the shortest edge; scale width proportionally.
                new_height = shortest_edge
                new_width = int(new_height * (width / height))
            rlt = img.resize((new_width, new_height), resample=Image.LANCZOS)
            rlt.save(output_path)

    return 'processed'
|
|
|
|
|
|
|
|
def main(args):
    """Fan out multi-scale generation for every file in ``args.input``.

    Submits one `process_image` task per input file to a thread pool and
    tracks processed/skipped/error counts on a tqdm progress bar.
    """
    scale_list = [0.75, 0.5, 1 / 3]
    shortest_edge = 400

    path_list = sorted(glob.glob(os.path.join(args.input, '*')))
    if not path_list:
        print('Изображения не найдены')
        return

    processed = 0
    skipped = 0
    errors = 0

    with ThreadPoolExecutor(max_workers=args.workers) as executor:
        # Map each future back to its source path for error reporting.
        future_to_path = {}
        for path in path_list:
            fut = executor.submit(process_image, path, args.output, scale_list, shortest_edge)
            future_to_path[fut] = path

        with tqdm(total=len(path_list), desc='Обработка изображений', unit='img') as pbar:
            for fut in as_completed(future_to_path):
                try:
                    outcome = fut.result()
                except Exception as exc:
                    errors += 1
                    # tqdm.write keeps the message from mangling the bar.
                    tqdm.write(f'Ошибка при обработке {future_to_path[fut]}: {exc}')
                else:
                    if outcome == 'skipped':
                        skipped += 1
                    else:
                        processed += 1
                pbar.set_postfix({'обработано': processed, 'пропущено': skipped, 'ошибок': errors})
                pbar.update(1)
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Generate multi-scale versions for GT images with LANCZOS resampling.
    # Currently used for the DF2K dataset (DIV2K + Flickr2K).
    # Multithreaded version.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--input', type=str, default='datasets/DF2K/DF2K_HR', help='Input folder')
    arg_parser.add_argument('--output', type=str, default='datasets/DF2K/DF2K_multiscale', help='Output folder')
    arg_parser.add_argument('--workers', type=int, default=None, help='Number of worker threads (default: CPU count)')
    cli_args = arg_parser.parse_args()

    # Default the worker count to one thread per CPU core.
    if cli_args.workers is None:
        import multiprocessing
        cli_args.workers = multiprocessing.cpu_count()

    os.makedirs(cli_args.output, exist_ok=True)
    main(cli_args)
|
|
|
|
|
|