# Datasets:
# File size: 2,155 Bytes
import json
import os
import re
def generate_hf_metadata(dataset_dir):
    """Generate a HuggingFace-style ``metadata.jsonl`` for a paired SR dataset.

    Scans ``dataset_dir/lq`` for low-quality images, matches each one to its
    ground-truth counterpart in ``dataset_dir/gt`` (handling the common
    super-resolution ``_LR<n>`` -> ``_HR`` naming substitution), and writes one
    JSON record per matched pair to ``dataset_dir/metadata.jsonl``.

    Args:
        dataset_dir: Root directory expected to contain ``lq`` and ``gt``
            subfolders. If either is missing, the function prints a notice
            and returns without writing anything.
    """
    lq_dir = os.path.join(dataset_dir, 'lq')
    gt_dir = os.path.join(dataset_dir, 'gt')
    # Both halves of the pair are required; bail out early otherwise.
    if not os.path.exists(lq_dir) or not os.path.exists(gt_dir):
        print(f"Skipping {dataset_dir}, missing 'lq' or 'gt' folders.")
        return

    metadata_path = os.path.join(dataset_dir, 'metadata.jsonl')
    image_exts = ('.png', '.jpg', '.jpeg', '.bmp', '.tif', '.webp')
    count = 0
    with open(metadata_path, 'w', encoding='utf-8') as f:
        # Traverse the lq folder in a deterministic (sorted) order.
        for filename in sorted(os.listdir(lq_dir)):
            if not filename.lower().endswith(image_exts):
                continue
            gt_filename = filename
            if "_LR" in gt_filename:
                # Common SR substitutions (e.g., _LR4 -> _HR, _LR -> _HR)
                gt_filename = re.sub(r'_LR\d*', '_HR', gt_filename)
            gt_file_path = os.path.join(gt_dir, gt_filename)
            # Check if GT exists; fall back to the identical name if the
            # _LR -> _HR substitution guessed wrong.
            if not os.path.exists(gt_file_path):
                if os.path.exists(os.path.join(gt_dir, filename)):
                    gt_filename = filename
                    gt_file_path = os.path.join(gt_dir, gt_filename)
            if os.path.exists(gt_file_path):
                # Bug fix: the lq entry must reference the actual image file
                # (the original wrote the literal placeholder "lq/(unknown)"
                # for every record).
                record = {
                    "file_name": f"lq/{filename}",
                    "ground_truth": f"gt/{gt_filename}"
                }
                f.write(json.dumps(record) + '\n')
                count += 1
            else:
                print(f"Warning: Missing GT image for -> {gt_file_path}")
    print(f"Successfully generated {metadata_path} for {dataset_dir}. Matched {count} image pairs.")
if __name__ == "__main__":
datasets = ['imagenet512', 'RealSet80', 'RealSR']
for ds in datasets:
if os.path.exists(ds):
generate_hf_metadata(ds)
else:
print(f"Directory not found: {ds}")
|