#!/usr/bin/env python
"""Validate the compact v1 dataset layout.
The compact workflow keeps raw ``.osz`` archives plus Parquet metadata only.
It must not recreate the discarded extracted blob store, per-revision
manifests, or the old global blobs registry.
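
Expected layout under the repository root (the paths checked below):

* ``archives/`` containing the raw ``.osz`` files.
* ``data/<schema-version>/all_revisions/`` with the ``archive_revisions``,
  ``set_revisions`` and ``latest_revisions`` Parquet datasets.
* ``data/<schema-version>/latest/`` containing Parquet files.

Example invocation (the path is illustrative)::

    python validate_compact_v1.py --repo-root /path/to/dataset --json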
"""

from __future__ import annotations

import argparse
import json
import sys
from pathlib import Path
from typing import Any

import pyarrow.dataset as ds


def _count_rows(path: Path) -> int:
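    """Return the total row count across all Parquet files under ``path``.

    A missing directory, or one without any ``*.parquet`` files, counts as 0.
    """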
files = sorted(path.rglob("*.parquet")) if path.exists() else []
if not files:
return 0
    return int(ds.dataset([str(p) for p in files], format="parquet").count_rows())


def _count_files(path: Path, pattern: str = "*") -> int:
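    """Return the number of regular files under ``path`` matching ``pattern``."""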
if not path.exists():
return 0
    return sum(1 for p in path.rglob(pattern) if p.is_file())


def validate_compact_v1(
repo_root: Path,
*,
schema_version: str = "v1",
require_archive_file_match: bool = False,
expected_archive_rows: int | None = None,
max_data_files: int | None = 10_000,
allow_empty: bool = False,
) -> dict[str, Any]:
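    """Check ``repo_root`` against the compact v1 layout contract.

    Returns a summary dict of file and row counts plus ``ok`` (True when no
    violations were found) and ``errors`` (human-readable violation messages).
    """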
repo_root = repo_root.resolve()
all_revisions = repo_root / "data" / schema_version / "all_revisions"
latest = repo_root / "data" / schema_version / "latest"
summary: dict[str, Any] = {
"archive_files": _count_files(repo_root / "archives", "*.osz"),
"data_files": _count_files(repo_root / "data"),
"blob_files": _count_files(repo_root / "blobs"),
"manifest_files": _count_files(repo_root / "manifests"),
"archive_rows": _count_rows(all_revisions / "archive_revisions"),
"set_rows": _count_rows(all_revisions / "set_revisions"),
"latest_revision_rows": _count_rows(all_revisions / "latest_revisions"),
"latest_files": _count_files(latest, "*.parquet"),
"blobs_registry_files": _count_files(all_revisions / "blobs", "*.parquet"),
"partition_dirs_in_all_revisions": sorted(
str(p.relative_to(repo_root))
for p in all_revisions.rglob("p_*=*")
if p.is_dir()
)
if all_revisions.exists()
else [],
"partition_dirs_in_latest": sorted(
str(p.relative_to(repo_root))
for p in latest.rglob("p_*=*")
if p.is_dir()
)
if latest.exists()
else [],
}
errors: list[str] = []
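    # Opt-in strict checks: exact archive row count and row/file parity.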
if expected_archive_rows is not None and summary["archive_rows"] != expected_archive_rows:
errors.append(
"archive row count mismatch: "
f"rows={summary['archive_rows']}, expected={expected_archive_rows}"
)
if require_archive_file_match and summary["archive_rows"] != summary["archive_files"]:
errors.append(
"archive row/file mismatch: "
f"rows={summary['archive_rows']}, files={summary['archive_files']}"
)
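    # Unless explicitly allowed, every metadata table and latest/ must hold data.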
if not allow_empty:
if summary["archive_rows"] <= 0:
errors.append("archive_revisions has no rows")
if summary["set_rows"] <= 0:
errors.append("set_revisions has no rows")
if summary["latest_revision_rows"] <= 0:
errors.append("latest_revisions has no rows")
if summary["latest_files"] <= 0:
errors.append("latest/ has no parquet files")
if summary["blob_files"]:
errors.append(f"compact layout contains {summary['blob_files']} blob file(s)")
if summary["manifest_files"]:
errors.append(
f"compact layout contains {summary['manifest_files']} manifest file(s)"
)
if summary["blobs_registry_files"]:
errors.append(
"compact layout contains data/v1/all_revisions/blobs parquet files"
)
if summary["partition_dirs_in_all_revisions"]:
errors.append("all_revisions contains physical partition directories")
if summary["partition_dirs_in_latest"]:
errors.append("latest contains physical partition directories")
if max_data_files is not None and summary["data_files"] > max_data_files:
errors.append(
f"compact layout produced too many data files: "
f"{summary['data_files']} > {max_data_files}"
)
summary["ok"] = not errors
summary["errors"] = errors
    return summary


def parse_args(argv: list[str] | None = None) -> argparse.Namespace:
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("--repo-root", default=".")
parser.add_argument("--schema-version", default="v1")
parser.add_argument("--require-archive-file-match", action="store_true")
parser.add_argument("--expected-archive-rows", type=int, default=None)
parser.add_argument("--max-data-files", type=int, default=10_000)
parser.add_argument("--allow-empty", action="store_true")
parser.add_argument("--json", action="store_true", help="emit JSON summary")
    return parser.parse_args(argv)


def main(argv: list[str] | None = None) -> int:
args = parse_args(argv)
summary = validate_compact_v1(
Path(args.repo_root),
schema_version=args.schema_version,
require_archive_file_match=args.require_archive_file_match,
expected_archive_rows=args.expected_archive_rows,
max_data_files=args.max_data_files,
allow_empty=args.allow_empty,
)
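    # Emit either a JSON summary or plain key=value lines; errors go to stderr.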
if args.json:
print(json.dumps(summary, indent=2, sort_keys=True))
else:
for key, value in summary.items():
if key in {"errors", "ok"}:
continue
print(f"{key}={value}")
if summary["errors"]:
print("errors:", file=sys.stderr)
for err in summary["errors"]:
print(f"- {err}", file=sys.stderr)
    return 0 if summary["ok"] else 1


if __name__ == "__main__":
raise SystemExit(main())