import argparse
import glob
import os
from concurrent.futures import ThreadPoolExecutor, as_completed
from PIL import Image
from tqdm import tqdm


def process_image(path, output_dir, scale_list, shortest_edge):
    """Обрабатывает одно изображение, создавая масштабированные версии."""
    basename = os.path.splitext(os.path.basename(path))[0]

    # Skip this image if every output (the scaled copies plus the extra shortest-edge copy) already exists
    output_files = [os.path.join(output_dir, f'{basename}T{idx}.png') for idx in range(len(scale_list) + 1)]
    if all(os.path.exists(f) for f in output_files):
        return 'skipped'

    img = Image.open(path)
    width, height = img.size

    for idx, scale in enumerate(scale_list):
        output_path = os.path.join(output_dir, f'{basename}T{idx}.png')
        if not os.path.exists(output_path):
            rlt = img.resize((int(width * scale), int(height * scale)), resample=Image.LANCZOS)
            rlt.save(output_path)

    # Save the smallest image, whose shortest edge equals shortest_edge (400 for DF2K)
    output_path = os.path.join(output_dir, f'{basename}T{len(scale_list)}.png')
    if not os.path.exists(output_path):
        if width < height:
            ratio = height / width
            new_width = shortest_edge
            new_height = int(new_width * ratio)
        else:
            ratio = width / height
            new_height = shortest_edge
            new_width = int(new_height * ratio)
        rlt = img.resize((new_width, new_height), resample=Image.LANCZOS)
        rlt.save(output_path)

    return 'processed'


def main(args):
    # For DF2K, we consider the following three scales,
    # and the smallest image whose shortest edge is 400
    scale_list = [0.75, 0.5, 1 / 3]
    shortest_edge = 400
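
    # Worked example (illustrative numbers, not taken from the dataset): a
    # 2040x1404 GT image yields T0: 1530x1053 (x0.75), T1: 1020x702 (x0.5),
    # T2: 680x468 (x1/3), and T3: 581x400 (shortest edge rescaled to 400).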

    path_list = sorted(glob.glob(os.path.join(args.input, '*')))

    if not path_list:
        print('No images found')
        return

    # Use a ThreadPoolExecutor for parallel processing
    with ThreadPoolExecutor(max_workers=args.workers) as executor:
        futures = {
            executor.submit(process_image, path, args.output, scale_list, shortest_edge): path
            for path in path_list
        }

        # Wait for all tasks to complete, updating a progress bar
        processed = 0
        skipped = 0
        errors = 0

        with tqdm(total=len(path_list), desc='Processing images', unit='img') as pbar:
            for future in as_completed(futures):
                try:
                    result = future.result()
                    if result == 'skipped':
                        skipped += 1
                    else:
                        processed += 1
                    pbar.set_postfix({'processed': processed, 'skipped': skipped, 'errors': errors})
                except Exception as e:
                    path = futures[future]
                    errors += 1
                    tqdm.write(f'Error processing {path}: {e}')
                    pbar.set_postfix({'processed': processed, 'skipped': skipped, 'errors': errors})
                finally:
                    pbar.update(1)


if __name__ == '__main__':
    """Generate multi-scale versions for GT images with LANCZOS resampling.
    It is now used for DF2K dataset (DIV2K + Flickr 2K)
    Multithreaded version.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--input', type=str, default='datasets/DF2K/DF2K_HR', help='Input folder')
    parser.add_argument('--output', type=str, default='datasets/DF2K/DF2K_multiscale', help='Output folder')
    parser.add_argument('--workers', type=int, default=None, help='Number of worker threads (default: CPU count)')
    args = parser.parse_args()

    if args.workers is None:
        import multiprocessing
        args.workers = multiprocessing.cpu_count()

    os.makedirs(args.output, exist_ok=True)
    main(args)