#!/usr/bin/env python3
"""Package release data directories into per-community tar.zst archives."""
from __future__ import annotations
import argparse
import hashlib
import json
import shutil
import subprocess
from datetime import datetime, timezone
from pathlib import Path
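
# All paths are resolved relative to the release root, i.e. the parent of the
# directory containing this script.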
HF_DIR = Path(__file__).resolve().parents[1]
DATA_DIR = HF_DIR / "data"
ARCHIVES_DIR = HF_DIR / "archives"
ARCHIVES_MANIFEST = HF_DIR / "archives_manifest.json"
RELEASE_MANIFEST = HF_DIR / "release_manifest.json"


def community_key(path: Path) -> int:
    """Sort key: the numeric suffix of a community_<N> directory name."""
    return int(path.name.rsplit("_", 1)[1])


def sha256_file(path: Path, chunk_size: int = 1024 * 1024 * 16) -> str:
    """Hash a file in 16 MiB chunks so large archives are never read into memory at once."""
    digest = hashlib.sha256()
    with path.open("rb") as handle:
        for chunk in iter(lambda: handle.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()


def load_json(path: Path) -> dict:
    """Read a JSON file, returning an empty dict if it does not exist yet."""
    if not path.exists():
        return {}
    return json.loads(path.read_text(encoding="utf-8"))


def write_json(path: Path, data: object) -> None:
    """Write pretty-printed UTF-8 JSON with a trailing newline."""
    path.write_text(
        json.dumps(data, ensure_ascii=False, indent=2) + "\n",
        encoding="utf-8",
    )


def archive_is_current(record: dict | None, archive_path: Path) -> bool:
    """An archive is current if a manifest record exists, the file is on disk,
    the recorded size matches, and a checksum was recorded."""
    return (
        bool(record)
        and archive_path.exists()
        and record.get("size_bytes") == archive_path.stat().st_size
        and bool(record.get("sha256"))
    )


def build_archive(community_dir: Path, archive_path: Path, compression_level: int) -> None:
    """Create <community>.tar.zst via tar + zstd, writing to a temp file first
    so an interrupted run never leaves a truncated archive in place."""
    archive_path.parent.mkdir(parents=True, exist_ok=True)
    tmp_path = archive_path.with_suffix(archive_path.suffix + ".tmp")
    if tmp_path.exists():
        tmp_path.unlink()
    # -T0 lets zstd use all available cores; relative paths (with cwd=HF_DIR)
    # keep archive members rooted at data/ so extraction recreates that layout.
    compressor = f"zstd -T0 -{compression_level}"
    cmd = [
        "tar",
        "--use-compress-program",
        compressor,
        "-cf",
        str(tmp_path.relative_to(HF_DIR)),
        str(community_dir.relative_to(HF_DIR)),
    ]
    subprocess.run(cmd, cwd=HF_DIR, check=True)
    tmp_path.replace(archive_path)  # atomic rename into the final location


def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument("--level", type=int, default=6, help="zstd compression level")
    parser.add_argument("--force", action="store_true", help="rebuild existing archives")
    args = parser.parse_args()

    if not shutil.which("zstd"):
        raise SystemExit("zstd is required but was not found on PATH")

    community_dirs = sorted(
        (path for path in DATA_DIR.glob("community_*") if path.is_dir()),
        key=community_key,
    )

    existing_manifest = load_json(ARCHIVES_MANIFEST)
    existing_records = {
        record["community_id"]: record
        for record in existing_manifest.get("archives", [])
        if "community_id" in record
    }

    records = []
    for index, community_dir in enumerate(community_dirs, start=1):
        community_id = community_dir.name
        archive_path = ARCHIVES_DIR / f"{community_id}.tar.zst"
        existing_record = existing_records.get(community_id)
        print(f"[{index}/{len(community_dirs)}] {community_id}", flush=True)
        rebuilt = args.force or not archive_is_current(existing_record, archive_path)
        if rebuilt:
            build_archive(community_dir, archive_path, args.level)
        size_bytes = archive_path.stat().st_size
        # Recompute the checksum whenever the archive was (re)built; reusing the
        # recorded digest is only safe for archives that were left untouched.
        digest = existing_record["sha256"] if not rebuilt else sha256_file(archive_path)
        records.append(
            {
                "community_id": community_id,
                "archive_path": f"archives/{archive_path.name}",
                "extracts_to": f"data/{community_id}",
                "size_bytes": size_bytes,
                "sha256": digest,
            }
        )

    manifest = {
        "created_at": datetime.now(timezone.utc).isoformat(),
        "compression": {
            "format": "tar.zst",
            "zstd_level": args.level,
            "command_template": "tar --use-compress-program 'zstd -T0 -LEVEL' -cf archives/community_N.tar.zst data/community_N",
        },
        "num_archives": len(records),
        "total_size_bytes": sum(record["size_bytes"] for record in records),
        "archives": records,
    }
    write_json(ARCHIVES_MANIFEST, manifest)

    # If a top-level release manifest exists, point it at the archives manifest
    # and record the archive policy alongside it.
    release_manifest = load_json(RELEASE_MANIFEST)
    if release_manifest:
        release_manifest["archives_manifest"] = "archives_manifest.json"
        release_manifest["archive_policy"] = (
            "Data is published as one tar.zst archive per release community. "
            "Each archive extracts to the data_path referenced by the question JSON."
        )
        write_json(RELEASE_MANIFEST, release_manifest)

    print(json.dumps(manifest, ensure_ascii=False, indent=2), flush=True)


if __name__ == "__main__":
    main()