Datasets:
Tasks:
Video Classification
Modalities:
Video
Languages:
English
Size:
10K<n<100K
Tags:
video
License:
Update k600/quality_check.py
Browse files- k600/quality_check.py +46 -82
k600/quality_check.py
CHANGED
|
@@ -5,50 +5,43 @@ from fractions import Fraction
|
|
| 5 |
import sys
|
| 6 |
import concurrent.futures
|
| 7 |
import time
|
| 8 |
-
import threading
|
| 9 |
|
| 10 |
-
# --- (get_video_info function remains the same as the robust version) ---
|
| 11 |
def get_video_info(video_path):
|
| 12 |
-
# Reuse the robust get_video_info function from the previous version
|
| 13 |
cmd = [
|
| 14 |
"ffprobe", "-v", "error", "-show_entries",
|
| 15 |
"format=duration:stream=codec_type,r_frame_rate",
|
| 16 |
"-of", "json", "-i", video_path
|
| 17 |
]
|
| 18 |
-
# Increased timeout slightly, can be adjusted
|
| 19 |
try:
|
| 20 |
result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
|
| 21 |
-
text=True, timeout=45, check=False)
|
| 22 |
except subprocess.TimeoutExpired:
|
| 23 |
-
print(f"
|
| 24 |
-
return None
|
| 25 |
|
| 26 |
if result.returncode != 0:
|
| 27 |
error_msg = result.stderr.strip()
|
| 28 |
-
print(f"
|
| 29 |
-
return None
|
| 30 |
|
| 31 |
try:
|
| 32 |
data = json.loads(result.stdout)
|
| 33 |
except json.JSONDecodeError:
|
| 34 |
-
print(f"
|
| 35 |
-
return None
|
| 36 |
|
| 37 |
-
# --- Safely get duration ---
|
| 38 |
duration = None
|
| 39 |
if "format" in data and "duration" in data["format"]:
|
| 40 |
try:
|
| 41 |
duration = float(data["format"]["duration"])
|
| 42 |
except (ValueError, TypeError):
|
| 43 |
-
|
| 44 |
-
print(f"⚠️ Invalid duration format: {os.path.basename(video_path)}", file=sys.stderr)
|
| 45 |
return None
|
| 46 |
else:
|
| 47 |
-
|
| 48 |
-
print(f"⚠️ No duration found: {os.path.basename(video_path)}", file=sys.stderr)
|
| 49 |
return None
|
| 50 |
|
| 51 |
-
# --- Safely get FPS ---
|
| 52 |
fps = None
|
| 53 |
if "streams" in data:
|
| 54 |
for stream in data["streams"]:
|
|
@@ -57,49 +50,39 @@ def get_video_info(video_path):
|
|
| 57 |
if fps_str and fps_str != "0/0":
|
| 58 |
try:
|
| 59 |
fps = float(Fraction(*map(int, fps_str.split("/"))))
|
| 60 |
-
break
|
| 61 |
except (ValueError, ZeroDivisionError, TypeError):
|
| 62 |
-
|
| 63 |
else:
|
| 64 |
-
|
| 65 |
|
| 66 |
-
# --- Return results only if both duration and FPS are valid ---
|
| 67 |
if duration is not None and fps is not None:
|
| 68 |
return {
|
| 69 |
"duration": duration,
|
| 70 |
"fps": fps
|
| 71 |
}
|
| 72 |
-
elif duration is not None:
|
| 73 |
-
|
| 74 |
-
|
| 75 |
-
|
| 76 |
-
else: # Should be covered by earlier checks, but safety first
|
| 77 |
return None
|
| 78 |
-
# --- (End of get_video_info) ---
|
| 79 |
-
|
| 80 |
|
| 81 |
def process_video(video_path, log_file_path, log_lock):
|
| 82 |
-
"""
|
| 83 |
-
Worker function to process a single video file.
|
| 84 |
-
Checks criteria, removes if needed, and logs the processed path.
|
| 85 |
-
Returns: Tuple (status_string, video_path) e.g. ("kept", "/path/to/vid.mp4")
|
| 86 |
-
status_string can be "kept", "removed", "error"
|
| 87 |
-
"""
|
| 88 |
absolute_path = os.path.abspath(video_path)
|
| 89 |
base_name = os.path.basename(video_path)
|
| 90 |
-
status = "error"
|
| 91 |
|
| 92 |
try:
|
| 93 |
info = get_video_info(video_path)
|
| 94 |
|
| 95 |
if not info:
|
| 96 |
-
print(f"
|
| 97 |
try:
|
| 98 |
os.remove(video_path)
|
| 99 |
-
print(f"
|
| 100 |
status = "removed"
|
| 101 |
except OSError as e:
|
| 102 |
-
print(f"
|
| 103 |
status = "error"
|
| 104 |
else:
|
| 105 |
duration, fps = info["duration"], info["fps"]
|
|
@@ -112,48 +95,44 @@ def process_video(video_path, log_file_path, log_lock):
|
|
| 112 |
if remove_reason:
|
| 113 |
try:
|
| 114 |
os.remove(video_path)
|
| 115 |
-
print(f"
|
| 116 |
status = "removed"
|
| 117 |
except OSError as e:
|
| 118 |
-
print(f"
|
| 119 |
-
status = "error"
|
| 120 |
else:
|
| 121 |
-
# Keep print concise for valid files during parallel runs
|
| 122 |
status = "kept"
|
| 123 |
|
| 124 |
except subprocess.TimeoutExpired:
|
| 125 |
-
print(f"
|
| 126 |
try:
|
| 127 |
os.remove(video_path)
|
| 128 |
-
print(f"
|
| 129 |
status = "removed"
|
| 130 |
except OSError as e:
|
| 131 |
-
print(f"
|
| 132 |
status = "error"
|
| 133 |
|
| 134 |
except Exception as e:
|
| 135 |
-
print(f"
|
| 136 |
try:
|
| 137 |
os.remove(video_path)
|
| 138 |
-
print(f"
|
| 139 |
status = "removed"
|
| 140 |
except OSError as e:
|
| 141 |
-
print(f"
|
| 142 |
status = "error"
|
| 143 |
|
| 144 |
-
# --- Log regardless of status to prevent reprocessing ---
|
| 145 |
try:
|
| 146 |
-
with log_lock:
|
| 147 |
with open(log_file_path, 'a', encoding='utf-8') as log_f:
|
| 148 |
log_f.write(absolute_path + '\n')
|
| 149 |
except IOError as e:
|
| 150 |
-
print(f"
|
| 151 |
|
| 152 |
return status, absolute_path
|
| 153 |
|
| 154 |
-
|
| 155 |
def load_processed_files(log_file_path):
|
| 156 |
-
"""Loads processed file paths from the log file into a set."""
|
| 157 |
processed = set()
|
| 158 |
if os.path.exists(log_file_path):
|
| 159 |
try:
|
|
@@ -161,15 +140,10 @@ def load_processed_files(log_file_path):
|
|
| 161 |
for line in f:
|
| 162 |
processed.add(line.strip())
|
| 163 |
except IOError as e:
|
| 164 |
-
|
| 165 |
-
# Continue with an empty set, processing might repeat
|
| 166 |
return processed
|
| 167 |
|
| 168 |
-
|
| 169 |
def filter_videos_parallel(root_dir, log_file_path=".processed_videos.log", max_workers=None):
|
| 170 |
-
"""
|
| 171 |
-
Filters videos in parallel using ThreadPoolExecutor, supporting resume and progress display.
|
| 172 |
-
"""
|
| 173 |
if max_workers is None:
|
| 174 |
max_workers = min(os.cpu_count() * 2, 16)
|
| 175 |
|
|
@@ -178,11 +152,9 @@ def filter_videos_parallel(root_dir, log_file_path=".processed_videos.log", max_
|
|
| 178 |
print(f"Max workers: {max_workers}")
|
| 179 |
start_time = time.time()
|
| 180 |
|
| 181 |
-
# 1. Load already processed files
|
| 182 |
processed_files_set = load_processed_files(log_file_path)
|
| 183 |
print(f"Loaded {len(processed_files_set)} paths from log file.")
|
| 184 |
|
| 185 |
-
# 2. Collect video file paths *not* in the processed set
|
| 186 |
video_paths_to_process = []
|
| 187 |
total_files_found = 0
|
| 188 |
for dirpath, _, files in os.walk(root_dir):
|
|
@@ -194,8 +166,7 @@ def filter_videos_parallel(root_dir, log_file_path=".processed_videos.log", max_
|
|
| 194 |
video_paths_to_process.append(absolute_path)
|
| 195 |
|
| 196 |
skipped_count = total_files_found - len(video_paths_to_process)
|
| 197 |
-
total_to_process_this_run = len(video_paths_to_process)
|
| 198 |
-
|
| 199 |
print(f"Found {total_files_found} total MP4 files.")
|
| 200 |
if skipped_count > 0:
|
| 201 |
print(f"Skipping {skipped_count} files already processed (found in log).")
|
|
@@ -206,11 +177,10 @@ def filter_videos_parallel(root_dir, log_file_path=".processed_videos.log", max_
|
|
| 206 |
|
| 207 |
print(f"Processing {total_to_process_this_run} new files...")
|
| 208 |
|
| 209 |
-
# 3. Use ThreadPoolExecutor
|
| 210 |
results = {"kept": 0, "removed": 0, "error": 0}
|
| 211 |
log_lock = threading.Lock()
|
| 212 |
-
completed_count = 0
|
| 213 |
-
progress_update_interval = 50
|
| 214 |
|
| 215 |
with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
|
| 216 |
future_to_path = {
|
|
@@ -226,36 +196,31 @@ def filter_videos_parallel(root_dir, log_file_path=".processed_videos.log", max_
|
|
| 226 |
if status in results:
|
| 227 |
results[status] += 1
|
| 228 |
else:
|
| 229 |
-
print(f"
|
| 230 |
results["error"] += 1
|
| 231 |
|
| 232 |
-
# --- Progress Update Logic ---
|
| 233 |
completed_count += 1
|
| 234 |
-
# Update progress every N files or on the very last file
|
| 235 |
if completed_count % progress_update_interval == 0 or completed_count == total_to_process_this_run:
|
| 236 |
percent = (completed_count / total_to_process_this_run) * 100
|
| 237 |
-
# Use \r to return to beginning of line, pad with spaces to clear previous longer messages
|
| 238 |
progress_line = f"\rProgress: {completed_count} / {total_to_process_this_run} ({percent:.1f}%) completed. "
|
| 239 |
-
print(progress_line, end='', flush=True)
|
| 240 |
|
| 241 |
except Exception as exc:
|
| 242 |
-
print(f"\
|
| 243 |
results["error"] += 1
|
| 244 |
-
completed_count += 1
|
| 245 |
-
# Also log this path as processed
|
| 246 |
try:
|
| 247 |
with log_lock:
|
| 248 |
with open(log_file_path, 'a', encoding='utf-8') as log_f:
|
| 249 |
log_f.write(os.path.abspath(original_path) + '\n')
|
| 250 |
except IOError as e:
|
| 251 |
-
|
| 252 |
|
| 253 |
except KeyboardInterrupt:
|
| 254 |
-
|
| 255 |
-
|
| 256 |
|
| 257 |
-
|
| 258 |
-
print() # Print a newline to move cursor off the progress line before the summary
|
| 259 |
|
| 260 |
end_time = time.time()
|
| 261 |
print("\n--- Filtering Complete ---")
|
|
@@ -266,14 +231,13 @@ def filter_videos_parallel(root_dir, log_file_path=".processed_videos.log", max_
|
|
| 266 |
print(f"Total files skipped (already processed): {skipped_count}")
|
| 267 |
print(f"Total time for this run: {end_time - start_time:.2f} seconds")
|
| 268 |
|
| 269 |
-
|
| 270 |
if __name__ == "__main__":
|
| 271 |
target_directory = "./train/train/"
|
| 272 |
script_dir = os.path.dirname(os.path.abspath(__file__))
|
| 273 |
log_file = os.path.join(script_dir, ".processed_videos.log")
|
| 274 |
|
| 275 |
if not os.path.isdir(target_directory):
|
| 276 |
-
print(f"
|
| 277 |
sys.exit(1)
|
| 278 |
|
| 279 |
-
filter_videos_parallel(target_directory, log_file_path=log_file)
|
|
|
|
| 5 |
import sys
|
| 6 |
import concurrent.futures
|
| 7 |
import time
|
| 8 |
+
import threading
|
| 9 |
|
|
|
|
| 10 |
def get_video_info(video_path):
|
|
|
|
| 11 |
cmd = [
|
| 12 |
"ffprobe", "-v", "error", "-show_entries",
|
| 13 |
"format=duration:stream=codec_type,r_frame_rate",
|
| 14 |
"-of", "json", "-i", video_path
|
| 15 |
]
|
|
|
|
| 16 |
try:
|
| 17 |
result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
|
| 18 |
+
text=True, timeout=45, check=False)
|
| 19 |
except subprocess.TimeoutExpired:
|
| 20 |
+
print(f"Probe timed out: {os.path.basename(video_path)}", file=sys.stderr)
|
| 21 |
+
return None
|
| 22 |
|
| 23 |
if result.returncode != 0:
|
| 24 |
error_msg = result.stderr.strip()
|
| 25 |
+
print(f"Probe error ({result.returncode}): {os.path.basename(video_path)} - {error_msg}", file=sys.stderr)
|
| 26 |
+
return None
|
| 27 |
|
| 28 |
try:
|
| 29 |
data = json.loads(result.stdout)
|
| 30 |
except json.JSONDecodeError:
|
| 31 |
+
print(f"JSON decode error: {os.path.basename(video_path)}", file=sys.stderr)
|
| 32 |
+
return None
|
| 33 |
|
|
|
|
| 34 |
duration = None
|
| 35 |
if "format" in data and "duration" in data["format"]:
|
| 36 |
try:
|
| 37 |
duration = float(data["format"]["duration"])
|
| 38 |
except (ValueError, TypeError):
|
| 39 |
+
print(f"Invalid duration format: {os.path.basename(video_path)}", file=sys.stderr)
|
|
|
|
| 40 |
return None
|
| 41 |
else:
|
| 42 |
+
print(f"No duration found: {os.path.basename(video_path)}", file=sys.stderr)
|
|
|
|
| 43 |
return None
|
| 44 |
|
|
|
|
| 45 |
fps = None
|
| 46 |
if "streams" in data:
|
| 47 |
for stream in data["streams"]:
|
|
|
|
| 50 |
if fps_str and fps_str != "0/0":
|
| 51 |
try:
|
| 52 |
fps = float(Fraction(*map(int, fps_str.split("/"))))
|
| 53 |
+
break
|
| 54 |
except (ValueError, ZeroDivisionError, TypeError):
|
| 55 |
+
print(f"Invalid FPS format '{fps_str}': {os.path.basename(video_path)}", file=sys.stderr)
|
| 56 |
else:
|
| 57 |
+
print(f"Invalid FPS value '{fps_str}': {os.path.basename(video_path)}", file=sys.stderr)
|
| 58 |
|
|
|
|
| 59 |
if duration is not None and fps is not None:
|
| 60 |
return {
|
| 61 |
"duration": duration,
|
| 62 |
"fps": fps
|
| 63 |
}
|
| 64 |
+
elif duration is not None:
|
| 65 |
+
print(f"No valid FPS found (duration was {duration:.2f}s): {os.path.basename(video_path)}", file=sys.stderr)
|
| 66 |
+
return None
|
| 67 |
+
else:
|
|
|
|
| 68 |
return None
|
|
|
|
|
|
|
| 69 |
|
| 70 |
def process_video(video_path, log_file_path, log_lock):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 71 |
absolute_path = os.path.abspath(video_path)
|
| 72 |
base_name = os.path.basename(video_path)
|
| 73 |
+
status = "error"
|
| 74 |
|
| 75 |
try:
|
| 76 |
info = get_video_info(video_path)
|
| 77 |
|
| 78 |
if not info:
|
| 79 |
+
print(f"Skipping (probe error/missing info): {base_name}")
|
| 80 |
try:
|
| 81 |
os.remove(video_path)
|
| 82 |
+
print(f"Removed (probe error/missing info): {base_name}")
|
| 83 |
status = "removed"
|
| 84 |
except OSError as e:
|
| 85 |
+
print(f"Error removing {base_name}: {e}", file=sys.stderr)
|
| 86 |
status = "error"
|
| 87 |
else:
|
| 88 |
duration, fps = info["duration"], info["fps"]
|
|
|
|
| 95 |
if remove_reason:
|
| 96 |
try:
|
| 97 |
os.remove(video_path)
|
| 98 |
+
print(f"Removed ({remove_reason}): {base_name}")
|
| 99 |
status = "removed"
|
| 100 |
except OSError as e:
|
| 101 |
+
print(f"Error removing {base_name}: {e}", file=sys.stderr)
|
| 102 |
+
status = "error"
|
| 103 |
else:
|
|
|
|
| 104 |
status = "kept"
|
| 105 |
|
| 106 |
except subprocess.TimeoutExpired:
|
| 107 |
+
print(f"Timeout processing: {base_name}", file=sys.stderr)
|
| 108 |
try:
|
| 109 |
os.remove(video_path)
|
| 110 |
+
print(f"Removed (timeout): {base_name}")
|
| 111 |
status = "removed"
|
| 112 |
except OSError as e:
|
| 113 |
+
print(f"Error removing {base_name}: {e}", file=sys.stderr)
|
| 114 |
status = "error"
|
| 115 |
|
| 116 |
except Exception as e:
|
| 117 |
+
print(f"Unexpected error processing {base_name}: {e}", file=sys.stderr)
|
| 118 |
try:
|
| 119 |
os.remove(video_path)
|
| 120 |
+
print(f"Removed (unexpected error): {base_name}")
|
| 121 |
status = "removed"
|
| 122 |
except OSError as e:
|
| 123 |
+
print(f"Error removing {base_name}: {e}", file=sys.stderr)
|
| 124 |
status = "error"
|
| 125 |
|
|
|
|
| 126 |
try:
|
| 127 |
+
with log_lock:
|
| 128 |
with open(log_file_path, 'a', encoding='utf-8') as log_f:
|
| 129 |
log_f.write(absolute_path + '\n')
|
| 130 |
except IOError as e:
|
| 131 |
+
print(f"CRITICAL: Failed to write to log file {log_file_path}: {e}", file=sys.stderr)
|
| 132 |
|
| 133 |
return status, absolute_path
|
| 134 |
|
|
|
|
| 135 |
def load_processed_files(log_file_path):
|
|
|
|
| 136 |
processed = set()
|
| 137 |
if os.path.exists(log_file_path):
|
| 138 |
try:
|
|
|
|
| 140 |
for line in f:
|
| 141 |
processed.add(line.strip())
|
| 142 |
except IOError as e:
|
| 143 |
+
print(f"Warning: Could not read log file {log_file_path}: {e}", file=sys.stderr)
|
|
|
|
| 144 |
return processed
|
| 145 |
|
|
|
|
| 146 |
def filter_videos_parallel(root_dir, log_file_path=".processed_videos.log", max_workers=None):
|
|
|
|
|
|
|
|
|
|
| 147 |
if max_workers is None:
|
| 148 |
max_workers = min(os.cpu_count() * 2, 16)
|
| 149 |
|
|
|
|
| 152 |
print(f"Max workers: {max_workers}")
|
| 153 |
start_time = time.time()
|
| 154 |
|
|
|
|
| 155 |
processed_files_set = load_processed_files(log_file_path)
|
| 156 |
print(f"Loaded {len(processed_files_set)} paths from log file.")
|
| 157 |
|
|
|
|
| 158 |
video_paths_to_process = []
|
| 159 |
total_files_found = 0
|
| 160 |
for dirpath, _, files in os.walk(root_dir):
|
|
|
|
| 166 |
video_paths_to_process.append(absolute_path)
|
| 167 |
|
| 168 |
skipped_count = total_files_found - len(video_paths_to_process)
|
| 169 |
+
total_to_process_this_run = len(video_paths_to_process)
|
|
|
|
| 170 |
print(f"Found {total_files_found} total MP4 files.")
|
| 171 |
if skipped_count > 0:
|
| 172 |
print(f"Skipping {skipped_count} files already processed (found in log).")
|
|
|
|
| 177 |
|
| 178 |
print(f"Processing {total_to_process_this_run} new files...")
|
| 179 |
|
|
|
|
| 180 |
results = {"kept": 0, "removed": 0, "error": 0}
|
| 181 |
log_lock = threading.Lock()
|
| 182 |
+
completed_count = 0
|
| 183 |
+
progress_update_interval = 50
|
| 184 |
|
| 185 |
with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
|
| 186 |
future_to_path = {
|
|
|
|
| 196 |
if status in results:
|
| 197 |
results[status] += 1
|
| 198 |
else:
|
| 199 |
+
print(f"Unknown status '{status}' received for {os.path.basename(original_path)}", file=sys.stderr)
|
| 200 |
results["error"] += 1
|
| 201 |
|
|
|
|
| 202 |
completed_count += 1
|
|
|
|
| 203 |
if completed_count % progress_update_interval == 0 or completed_count == total_to_process_this_run:
|
| 204 |
percent = (completed_count / total_to_process_this_run) * 100
|
|
|
|
| 205 |
progress_line = f"\rProgress: {completed_count} / {total_to_process_this_run} ({percent:.1f}%) completed. "
|
| 206 |
+
print(progress_line, end='', flush=True)
|
| 207 |
|
| 208 |
except Exception as exc:
|
| 209 |
+
print(f"\nException for {os.path.basename(original_path)} during result retrieval: {exc}", file=sys.stderr)
|
| 210 |
results["error"] += 1
|
| 211 |
+
completed_count += 1
|
|
|
|
| 212 |
try:
|
| 213 |
with log_lock:
|
| 214 |
with open(log_file_path, 'a', encoding='utf-8') as log_f:
|
| 215 |
log_f.write(os.path.abspath(original_path) + '\n')
|
| 216 |
except IOError as e:
|
| 217 |
+
print(f"CRITICAL: Failed to write to log file after exception for {original_path}: {e}", file=sys.stderr)
|
| 218 |
|
| 219 |
except KeyboardInterrupt:
|
| 220 |
+
print("\nUser interrupted. Shutting down workers...")
|
| 221 |
+
sys.exit(1)
|
| 222 |
|
| 223 |
+
print()
|
|
|
|
| 224 |
|
| 225 |
end_time = time.time()
|
| 226 |
print("\n--- Filtering Complete ---")
|
|
|
|
| 231 |
print(f"Total files skipped (already processed): {skipped_count}")
|
| 232 |
print(f"Total time for this run: {end_time - start_time:.2f} seconds")
|
| 233 |
|
|
|
|
| 234 |
if __name__ == "__main__":
    # Directory containing the downloaded MP4s to quality-check.
    # Generalized: an optional first CLI argument overrides the default,
    # so the script no longer has to be edited to point at another split.
    target_directory = sys.argv[1] if len(sys.argv) > 1 else "./train/train/"

    # Keep the resume log next to this script (not the CWD) so repeated
    # runs find the same log regardless of where the script is invoked from.
    script_dir = os.path.dirname(os.path.abspath(__file__))
    log_file = os.path.join(script_dir, ".processed_videos.log")

    if not os.path.isdir(target_directory):
        print(f"Error: Target directory not found: {target_directory}", file=sys.stderr)
        sys.exit(1)

    filter_videos_parallel(target_directory, log_file_path=log_file)
|