# osu-everything-tools/python/test_compact_v1.py
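"""Unit tests for the compact v1 dataset layout: the validator, the update
helpers (fetcher-state seeding, batch archive listing, metadata compaction,
batch-reuse guard), and the deep validator's primary-key checks."""
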
from __future__ import annotations
import shutil
import tempfile
import unittest
from pathlib import Path
import pyarrow as pa
import pyarrow.parquet as pq
from compact_metadata_v1 import compact_metadata_v1
from deep_validate_compact_v1 import _primary_key_check
from ingest_osz import committed_batch_ids_for_parent
from list_batch_archives import list_batch_archive_paths
from seed_fetcher_state import numeric_latest_set_ids, seed_state
from validate_compact_v1 import validate_compact_v1


def _write(path: Path, rows: list[dict]) -> None:
    """Write ``rows`` to a Parquet file at ``path``, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    pq.write_table(pa.Table.from_pylist(rows), path)
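

# Validator behaviour against a minimal on-disk compact v1 layout.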
class CompactV1ValidatorTests(unittest.TestCase):
    def setUp(self) -> None:
        self.tmp = Path(tempfile.mkdtemp(prefix="compact-v1-"))
        # Minimal layout: one archive file plus one row in each compact v1
        # metadata table, all referencing the same fake sha256 digest.
        archive = (
            self.tmp
            / "archives"
            / "sha256"
            / "aa"
            / "bb"
            / ("a" * 64 + ".osz")
        )
        archive.parent.mkdir(parents=True, exist_ok=True)
        archive.write_bytes(b"osz")
        _write(
            self.tmp
            / "data"
            / "v1"
            / "all_revisions"
            / "archive_revisions"
            / "part.parquet",
            [{"archive_revision_id": "a" * 64}],
        )
        _write(
            self.tmp
            / "data"
            / "v1"
            / "all_revisions"
            / "set_revisions"
            / "part.parquet",
            [{"set_revision_id": "1:" + "a" * 64}],
        )
        _write(
            self.tmp
            / "data"
            / "v1"
            / "all_revisions"
            / "latest_revisions"
            / "part.parquet",
            [{"set_key": "1", "set_revision_id": "1:" + "a" * 64}],
        )
        _write(
            self.tmp
            / "data"
            / "v1"
            / "latest"
            / "beatmaps"
            / "part.parquet",
            [{"set_revision_id": "1:" + "a" * 64}],
        )

    def tearDown(self) -> None:
        shutil.rmtree(self.tmp)

    def test_accepts_compact_layout(self) -> None:
        summary = validate_compact_v1(
            self.tmp,
            require_archive_file_match=True,
            max_data_files=100,
        )
        self.assertTrue(summary["ok"], summary["errors"])
        self.assertEqual(summary["archive_files"], 1)
        self.assertEqual(summary["archive_rows"], 1)

    def test_rejects_old_blob_manifest_artifacts(self) -> None:
        blob = self.tmp / "blobs" / "sha256" / "aa" / "bb" / "x.bin"
        blob.parent.mkdir(parents=True, exist_ok=True)
        blob.write_bytes(b"x")
        manifest = self.tmp / "manifests" / "set=1" / "manifest.json"
        manifest.parent.mkdir(parents=True, exist_ok=True)
        manifest.write_text("{}", encoding="utf-8")
        summary = validate_compact_v1(self.tmp, require_archive_file_match=True)
        self.assertFalse(summary["ok"])
        self.assertTrue(
            any("blob file" in err for err in summary["errors"]),
            summary["errors"],
        )
        self.assertTrue(
            any("manifest file" in err for err in summary["errors"]),
            summary["errors"],
        )

    def test_rejects_all_revisions_physical_partitions(self) -> None:
        bad_dir = (
            self.tmp
            / "data"
            / "v1"
            / "all_revisions"
            / "beatmaps"
            / "p_bucket=x"
        )
        bad_dir.mkdir(parents=True, exist_ok=True)
        summary = validate_compact_v1(self.tmp, require_archive_file_match=True)
        self.assertFalse(summary["ok"])
        self.assertTrue(
            any("physical partition" in err for err in summary["errors"]),
            summary["errors"],
        )
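

# Update-path helpers: fetcher-state seeding, batch archive listing,
# metadata compaction, and the batch-reuse guard.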
class CompactV1UpdateHelperTests(unittest.TestCase):
    def setUp(self) -> None:
        self.tmp = Path(tempfile.mkdtemp(prefix="compact-v1-update-"))

    def tearDown(self) -> None:
        shutil.rmtree(self.tmp)

    def test_seed_fetcher_state_inserts_only_numeric_latest_sets(self) -> None:
        _write(
            self.tmp
            / "data"
            / "v1"
            / "all_revisions"
            / "latest_revisions"
            / "part.parquet",
            [
                {"set_key": "123", "set_revision_id": "123:" + "a" * 64},
                {
                    "set_key": "fingerprint:abc",
                    "set_revision_id": "fingerprint:abc:" + "b" * 64,
                },
            ],
        )
        self.assertEqual(numeric_latest_set_ids(self.tmp), {123})
        summary = seed_state(self.tmp, self.tmp / ".fetcher" / "state.db")
        self.assertEqual(summary["known_numeric_sets"], 1)
        self.assertEqual(summary["inserted_success_rows"], 1)
        # A second seed is intentionally idempotent.
        summary = seed_state(self.tmp, self.tmp / ".fetcher" / "state.db")
        self.assertEqual(summary["inserted_success_rows"], 0)

    def test_list_batch_archive_paths_matches_parent_and_chunk_batches(self) -> None:
        _write(
            self.tmp
            / "data"
            / "v1"
            / "all_revisions"
            / "archive_revisions"
            / "part.parquet",
            [
                {
                    "ingest_batch_id": "batch-a",
                    "archive_path": "archives/sha256/aa/bb/a.osz",
                },
                {
                    "ingest_batch_id": "batch-a-chunk-0001",
                    "archive_path": "archives/sha256/aa/bb/b.osz",
                },
                {
                    "ingest_batch_id": "batch-b",
                    "archive_path": "archives/sha256/aa/bb/c.osz",
                },
            ],
        )
        self.assertEqual(
            list_batch_archive_paths(self.tmp, "batch-a"),
            [
                "archives/sha256/aa/bb/a.osz",
                "archives/sha256/aa/bb/b.osz",
            ],
        )

    def test_compact_metadata_rewrites_fragments_without_losing_rows(self) -> None:
        table_root = self.tmp / "data" / "v1" / "all_revisions" / "beatmaps"
        _write(table_root / "part-a.parquet", [{"set_revision_id": "1:a", "x": 1}])
        _write(table_root / "part-b.parquet", [{"set_revision_id": "2:b", "x": 2}])
        summary = compact_metadata_v1(
            self.tmp,
            target_rows=10,
            batch_size=10,
            min_files=2,
            workers=2,
        )
        # Both fragments should be rewritten into a single file with no row loss.
        files = sorted(table_root.glob("*.parquet"))
        self.assertEqual([p.name for p in files], ["compact-00000.parquet"])
        self.assertEqual(pq.read_table(files[0]).num_rows, 2)
        self.assertEqual(summary["files_before"], 2)
        self.assertEqual(summary["files_after"], 1)

    def test_batch_reuse_guard_reads_compacted_archive_revision_rows(self) -> None:
        archive_root = (
            self.tmp
            / "data"
            / "v1"
            / "all_revisions"
            / "archive_revisions"
        )
        _write(
            archive_root / "compact-00000.parquet",
            [
                {"ingest_batch_id": "batch-a"},
                {"ingest_batch_id": "batch-a-chunk-0001"},
                {"ingest_batch_id": "batch-b"},
            ],
        )
        self.assertEqual(
            committed_batch_ids_for_parent(archive_root, "batch-a"),
            ["batch-a", "batch-a-chunk-0001"],
        )
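

# Primary-key uniqueness checks from the deep validator, in both the exact
# and the beatmap-scoped streaming modes.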
class DeepValidatorPrimaryKeyTests(unittest.TestCase):
    def setUp(self) -> None:
        self.tmp = Path(tempfile.mkdtemp(prefix="deep-validator-pk-"))

    def tearDown(self) -> None:
        shutil.rmtree(self.tmp)

    def test_exact_primary_key_check_reports_duplicate_rows(self) -> None:
        table_root = self.tmp / "beatmaps"
        _write(
            table_root / "part.parquet",
            [
                {"beatmap_uid": "osu-file:a.osu", "set_revision_id": "1:a"},
                {"beatmap_uid": "osu-file:a.osu", "set_revision_id": "1:a"},
            ],
        )
        summary = _primary_key_check(
            table_root,
            ("beatmap_uid", "set_revision_id"),
            row_count=2,
            exact_row_limit=10,
            batch_size=1,
        )
        self.assertTrue(summary["checked"])
        self.assertEqual(summary["mode"], "exact")
        self.assertEqual(summary["duplicate_rows"], 1)

    def test_large_beatmap_scoped_primary_key_check_streams_groups(self) -> None:
        table_root = self.tmp / "hit_objects_common"
        _write(
            table_root / "part.parquet",
            [
                {
                    "beatmap_uid": "osu-file:a.osu",
                    "set_revision_id": "1:a",
                    "object_index": 0,
                },
                {
                    "beatmap_uid": "osu-file:a.osu",
                    "set_revision_id": "1:a",
                    "object_index": 0,
                },
                {
                    "beatmap_uid": "osu-file:a.osu",
                    "set_revision_id": "1:a",
                    "object_index": 1,
                },
            ],
        )
        # row_count exceeds exact_row_limit, so the check should fall back to
        # the beatmap-scoped streaming mode.
        summary = _primary_key_check(
            table_root,
            ("beatmap_uid", "set_revision_id", "object_index"),
            row_count=3,
            exact_row_limit=0,
            batch_size=1,
        )
        self.assertTrue(summary["checked"])
        self.assertEqual(summary["mode"], "beatmap_scoped_stream")
        self.assertEqual(summary["duplicate_rows"], 1)


if __name__ == "__main__":
    unittest.main()