import os
import re
import json


def generate_hf_metadata(dataset_dir):
    """Write a HuggingFace-style ``metadata.jsonl`` pairing lq/ images with gt/ images.

    Scans ``dataset_dir/lq`` for image files, derives the matching ground-truth
    filename in ``dataset_dir/gt`` (substituting ``_LR<digits>`` -> ``_HR`` when
    present, falling back to the identical filename), and writes one JSON record
    per matched pair to ``dataset_dir/metadata.jsonl``. Unmatched lq images emit
    a warning and are skipped.

    Parameters
    ----------
    dataset_dir : str
        Directory expected to contain ``lq`` and ``gt`` subdirectories.
    """
    lq_dir = os.path.join(dataset_dir, 'lq')
    gt_dir = os.path.join(dataset_dir, 'gt')

    # Both subfolders are required; bail out early otherwise.
    if not os.path.exists(lq_dir) or not os.path.exists(gt_dir):
        print(f"Skipping {dataset_dir}, missing 'lq' or 'gt' folders.")
        return

    metadata_path = os.path.join(dataset_dir, 'metadata.jsonl')
    image_exts = ('.png', '.jpg', '.jpeg', '.bmp', '.tif', '.webp')
    count = 0

    with open(metadata_path, 'w', encoding='utf-8') as f:
        # Traverse the lq folder in deterministic (sorted) order.
        for filename in sorted(os.listdir(lq_dir)):
            if not filename.lower().endswith(image_exts):
                continue

            gt_filename = filename
            if "_LR" in gt_filename:
                # Common SR substitutions (e.g., _LR4 -> _HR, _LR -> _HR)
                gt_filename = re.sub(r'_LR\d*', '_HR', gt_filename)
            gt_file_path = os.path.join(gt_dir, gt_filename)

            # Fall back to the identical filename if the substitution missed.
            if not os.path.exists(gt_file_path):
                if os.path.exists(os.path.join(gt_dir, filename)):
                    gt_filename = filename
                    gt_file_path = os.path.join(gt_dir, gt_filename)

            if os.path.exists(gt_file_path):
                record = {
                    # BUG FIX: the original wrote the literal placeholder
                    # "lq/(unknown)" here instead of the lq image's path.
                    "file_name": f"lq/{filename}",
                    "ground_truth": f"gt/{gt_filename}",
                }
                f.write(json.dumps(record) + '\n')
                count += 1
            else:
                print(f"Warning: Missing GT image for -> {gt_file_path}")

    print(f"Successfully generated {metadata_path} for {dataset_dir}. Matched {count} image pairs.")


if __name__ == "__main__":
    datasets = ['imagenet512', 'RealSet80', 'RealSR']
    for ds in datasets:
        if os.path.exists(ds):
            generate_hf_metadata(ds)
        else:
            print(f"Directory not found: {ds}")