|
|
|
|
|
import argparse, csv, hashlib, pathlib, sys, os |
|
|
from collections import defaultdict |
|
|
|
|
|
def sha256(p, buf=1024*1024):
    """Return the hex SHA-256 digest of the file at *p*, read in *buf*-byte chunks."""
    digest = hashlib.sha256()
    with open(p, "rb") as fh:
        # iter() with a b"" sentinel stops exactly when read() is exhausted.
        for chunk in iter(lambda: fh.read(buf), b""):
            digest.update(chunk)
    return digest.hexdigest()
|
|
|
|
|
def norm_header(h: str) -> str:
    """Normalize a CSV header cell: drop a UTF-8 BOM, trim whitespace,
    lowercase, and replace spaces with underscores.

    Accepts None (returns ""). The BOM is removed *before* stripping so that
    whitespace sitting between the BOM and the text is also trimmed (the BOM
    is not whitespace, so the original strip-then-lstrip order left it).
    """
    return (h or "").lstrip("\ufeff").strip().lower().replace(" ", "_")
|
|
|
|
|
def norm_path_for_match(p: str) -> str:
    """Normalize a path string for dictionary-key matching.

    Converts backslashes to forward slashes and removes any leading "./"
    segments. After the backslash replacement only "./" prefixes can occur,
    so the original's extra ".\\" check was unreachable dead code.
    """
    q = p.replace("\\", "/")
    while q.startswith("./"):
        q = q[2:]
    return q
|
|
|
|
|
def main(argv=None):
    """Scan the audio tree, hash every .wav/.flac file, and write the hashes
    into the metadata CSV's checksum column.

    argv: optional argument list for argparse; defaults to sys.argv[1:]
          (the new parameter is backward-compatible and enables testing).

    Exits via sys.exit() with an error message when the metadata file is
    missing or unusable.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument("--meta", default="data/metadata/metadata.csv",
                    help="Path to metadata.csv")
    ap.add_argument("--audio-root", default="data/audio",
                    help="Root folder containing audio files")
    ap.add_argument("--path-col", default=None,
                    help="Column name that holds paths (default: auto-detect; prefers 'file_path')")
    ap.add_argument("--checksum-col", default="checksum_sha256",
                    help="Column name to write sha256 into")
    args = ap.parse_args(argv)

    # Project root = parent of this script's directory. Relative --meta /
    # --audio-root resolve against it; absolute arguments pass through
    # unchanged (pathlib's "/" discards the left operand for absolute paths).
    root = pathlib.Path(__file__).resolve().parents[1]
    audio_root = (root / args.audio_root).resolve()
    meta_path = (root / args.meta).resolve()

    if not meta_path.exists():
        sys.exit(f"[ERR] metadata file not found: {meta_path}")
    if not audio_root.exists():
        print(f"[WARN] audio root not found yet: {audio_root}")

    # Hash every audio file under audio_root, keyed by its normalized path
    # relative to the project root.
    print("[INFO] Scanning audio files for checksums…")
    hashmap = {}
    for p in audio_root.rglob("*"):
        if p.suffix.lower() not in {".wav", ".flac"} or not p.is_file():
            continue
        rel = p.relative_to(root).as_posix()
        hashmap[norm_path_for_match(rel)] = sha256(p)

    print(f"[INFO] Reading metadata: {meta_path}")
    raw = meta_path.read_text(encoding="utf-8", errors="replace")
    try:
        # Sniff the delimiter from the header line only; fall back to the
        # default comma-separated "excel" dialect on any sniffing failure.
        dialect = csv.Sniffer().sniff(raw.splitlines()[0] if raw else ",")
    except Exception:
        dialect = csv.excel

    rows = []
    with open(meta_path, newline="", encoding="utf-8", errors="replace") as f:
        reader = csv.reader(f, dialect)
        try:
            headers = next(reader)
        except StopIteration:
            sys.exit("[ERR] metadata.csv appears empty.")

        norm_headers = [norm_header(h) for h in headers]
        hdr_map = {norm_header(h): i for i, h in enumerate(headers)}

        # Pick the column holding file paths: explicit --path-col wins,
        # otherwise the first match among common column names.
        candidate_names = [norm_header(args.path_col)] if args.path_col else [
            "file_path", "filepath", "path", "relative_path", "audio_path", "wav", "rir_path"
        ]
        path_col_norm = next((c for c in candidate_names if c in hdr_map), None)
        if not path_col_norm:
            msg = (f"[ERR] Could not find a path column. Looked for any of: "
                   f"{candidate_names}. Available columns: {norm_headers}")
            sys.exit(msg)

        # Reuse an existing checksum column or append a new one.
        checksum_col_norm = norm_header(args.checksum_col)
        if checksum_col_norm not in hdr_map:
            headers.append(args.checksum_col)
            norm_headers.append(checksum_col_norm)
            checksum_idx = len(headers) - 1
        else:
            checksum_idx = hdr_map[checksum_col_norm]

        path_idx = hdr_map[path_col_norm]

        rows.append(headers)
        for i, row in enumerate(reader, start=1):
            # Pad short rows so row[checksum_idx] is always assignable.
            if len(row) < len(headers):
                row += [""] * (len(headers) - len(row))

            csv_path_raw = (row[path_idx] or "").strip()
            if not csv_path_raw:
                print(f"[WARN] row {i}: empty path cell; leaving checksum blank")
                rows.append(row)
                continue

            # Build candidate lookup keys, most specific first.
            candidates = [norm_path_for_match(csv_path_raw)]

            # Absolute CSV paths: try the form relative to the project root.
            p = pathlib.Path(csv_path_raw)
            if p.is_absolute():
                try:
                    candidates.append(
                        norm_path_for_match(p.relative_to(root).as_posix()))
                except Exception:
                    pass

            # Paths given relative to the audio root rather than the project root.
            try:
                rel2 = (audio_root / csv_path_raw).resolve().relative_to(root).as_posix()
                candidates.append(norm_path_for_match(rel2))
            except Exception:
                pass

            basename = pathlib.Path(csv_path_raw).name

            # De-duplicate while preserving priority order.
            candidates = list(dict.fromkeys(candidates))

            sha = ""
            for cand in candidates:
                sha = hashmap.get(cand, "")
                if sha:
                    break

            # Last resort: a basename match, but only when it is unambiguous.
            if not sha and basename:
                matches = [v for k, v in hashmap.items()
                           if pathlib.Path(k).name == basename]
                if len(matches) == 1:
                    sha = matches[0]

            row[checksum_idx] = sha
            if not sha:
                print(f"[WARN] row {i}: no match for '{csv_path_raw}' (tried {len(candidates)} candidates)")
            rows.append(row)

    print(f"[INFO] Writing updated metadata with checksums → {meta_path}")
    with open(meta_path, "w", newline="", encoding="utf-8") as f:
        writer = csv.writer(f, dialect)
        writer.writerows(rows)

    print("[DONE] Checksums inserted. "
          f"Found hashes for ~{sum(1 for r in rows[1:] if r[checksum_idx])} rows.")
|
|
|
|
|
# Script entry point: run the checksum-insertion workflow when executed directly.
if __name__ == "__main__":
    main()
|
|
|