"""Copy the de-duplicated "keep" images of each movie/quality variant.

For every ``*_filter.json`` in the filter folder (each listing a movie name
and the image filenames to keep), copy those images from each per-quality
processed folder into ``dedup/<processed_name>/<movie>_<quality>/``,
prefixing each copied file with its source folder name.
"""

import os
import json
import shutil

# Where the per-quality processed image folders live.
BASE_PROCESSED_FOLDER = "processed/lq"
# Where the *_filter.json keep-lists live.
FILTER_FOLDER = "hq_og"
# Name of the processed variant; output goes to dedup/<PROCESSED_NAME>.
PROCESSED_NAME = "lq"
# Quality levels 12, 14, ..., 40 (loop-invariant, so built once).
QUALITIES = list(range(12, 41, 2))


def load_filter(json_path):
    """Read a ``*_filter.json`` file.

    Parameters:
        json_path: path to the JSON file; must contain the keys
            ``"moviename"`` (str) and ``"keep_files"`` (list of filenames).

    Returns:
        Tuple ``(moviename, keep_files)``.
    """
    with open(json_path, "r") as f:
        data = json.load(f)
    return data["moviename"], data["keep_files"]


def copy_keep_files(input_folder, output_folder, folder_name, keep_files):
    """Copy each kept image from *input_folder* into *output_folder*.

    The destination name is prefixed with *folder_name* so files from
    different movie/quality folders cannot collide. Missing sources are
    reported with a warning rather than raising.

    Returns:
        Number of files actually copied.
    """
    os.makedirs(output_folder, exist_ok=True)
    copied = 0
    for img_name in keep_files:
        src = os.path.join(input_folder, img_name)
        dst = os.path.join(output_folder, f"{folder_name}_{img_name}")
        if os.path.exists(src):
            # copy2 preserves file metadata (mtime etc.), matching the
            # original script's behavior.
            shutil.copy2(src, dst)
            copied += 1
        else:
            print(f"Warning: file not found {src}")
    return copied


def main(filter_folder=FILTER_FOLDER,
         base_processed_folder=BASE_PROCESSED_FOLDER,
         processed_name=PROCESSED_NAME,
         qualities=QUALITIES):
    """Run the dedup copy over every filter file and quality level."""
    output_base = os.path.join("dedup", processed_name)
    os.makedirs(output_base, exist_ok=True)

    for fname in os.listdir(filter_folder):
        if not fname.endswith("_filter.json"):
            continue
        moviename, keep_files = load_filter(os.path.join(filter_folder, fname))

        for quality in qualities:
            folder_name = f"{moviename}_{quality}"
            input_folder = os.path.join(base_processed_folder, folder_name)
            output_folder = os.path.join(output_base, folder_name)
            print(f"Processing {folder_name}: copying {len(keep_files)} files")
            copy_keep_files(input_folder, output_folder, folder_name, keep_files)

    # BUGFIX: the original message claimed /dedup/hq_og/ even though the
    # output actually goes to dedup/<processed_name>/ (dedup/lq here).
    print(f"Copying complete. Deduplicated dataset saved in {output_base}/")


if __name__ == "__main__":
    main()