# osu-everything-tools / scripts / remove_maps_v1.sh
# Last change: "Make dataset wrappers path-portable" — Dan, commit a472ea0
#!/usr/bin/env bash
# remove_maps_v1.sh — destructively removes a batch of maps from the local
# dataset, rebuilds the latest snapshot, re-validates, and (optionally)
# mirrors the deletions to the Hugging Face bucket.
#
# Helpers and variables come from common.sh (sourced below): $PYTHON, $HF,
# $SOURCE_ROOT / $PY_SOURCE_ROOT, $DATASET_ROOT / $PY_DATASET_ROOT, and
# prepare_dataset_root.  NOTE(review): the relative paths used later
# (logs/, .scratch/, archives/, data/) suggest prepare_dataset_root also
# cd's into the dataset root — confirm in common.sh.
set -euo pipefail
# Resolve the directory containing this script so common.sh is found
# regardless of the caller's working directory (path-portable).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/common.sh"
prepare_dataset_root
# Tunables — every knob may be overridden from the environment; the value
# after ':=' is the default applied when the variable is unset or empty.
: "${BUCKET:=hf://buckets/lekdan/osu-everything}"   # target HF bucket URL
: "${BUCKET_ID:=lekdan/osu-everything}"             # id for 'hf buckets info'
: "${STATE_DB:=.fetcher/state.db}"                  # fetcher state database
: "${REMOVE_COUNT:=1000}"                           # how many maps to remove
: "${REMOVE_WORKERS:=4}"                            # parallelism for removal
: "${UPLOAD:=1}"                                    # 1 = sync results upstream
: "${LATEST_REBUILD_WORKERS:=8}"                    # snapshot rebuild workers
# One timestamped log per run, plus a machine-readable summary written by
# the removal step into scratch space.
mkdir -p logs .scratch
run_stamp="$(date -u +%Y%m%dT%H%M%SZ)"
LOG="logs/remove-maps-v1-${run_stamp}.log"
SUMMARY=".scratch/remove-maps-v1-summary.json"
# Mirror all stdout and stderr into the run log while still showing it on
# the console (tee).  Everything below this line is captured.
exec > >(tee -a "$LOG") 2>&1
# Run header: record the effective parameters so a destructive run can be
# reconstructed post-mortem from the log alone.
echo "started_at=$(date -u +%Y-%m-%dT%H:%M:%SZ)"
echo "source_root=${SOURCE_ROOT}"
echo "dataset_root=${DATASET_ROOT}"
echo "bucket=${BUCKET}"
echo "remove_count=${REMOVE_COUNT}"
echo "remove_workers=${REMOVE_WORKERS}"
echo "state_db=${STATE_DB}"
echo "upload=${UPLOAD}"
# Safety interlock: refuse to run while any other pipeline process is
# active, since removal is destructive and races with fetch/index/sync.
# The [x] first-letter bracket keeps the literal pattern itself (e.g. in a
# process's argv) from matching.
guard_pattern='[o]su_fetcher|[o]su_indexer|[i]ngest_osz|[r]ebuild_latest_snapshot|[c]ompact_metadata_v1|[u]pdate_maps_v1|[h]f( sync| buckets sync)'
if active="$(pgrep -af "$guard_pattern")"; then
  echo "refusing to remove maps while another pipeline process is active:" >&2
  echo "$active" >&2
  exit 65
fi
# Hugging Face transfer tuning for the sync steps below: turn on the fast
# transfer path and bound xet upload concurrency / retry duration.  The
# three concurrency knobs honor pre-existing environment overrides.
export HF_HUB_ENABLE_HF_TRANSFER=1
export HF_XET_HIGH_PERFORMANCE=1
: "${HF_XET_CLIENT_AC_INITIAL_UPLOAD_CONCURRENCY:=4}"
: "${HF_XET_CLIENT_AC_MAX_UPLOAD_CONCURRENCY:=8}"
: "${HF_XET_CLIENT_RETRY_MAX_DURATION:=600}"
export HF_XET_CLIENT_AC_INITIAL_UPLOAD_CONCURRENCY \
       HF_XET_CLIENT_AC_MAX_UPLOAD_CONCURRENCY \
       HF_XET_CLIENT_RETRY_MAX_DURATION
# Pre-flight: the compact v1 layout must validate cleanly before anything
# destructive happens.  Fails the run (set -e) on any inconsistency.
preflight_args=(
  --repo-root "$PY_DATASET_ROOT"
  --require-archive-file-match
  --max-data-files 10000
)
"$PYTHON" "$PY_SOURCE_ROOT/python/validate_compact_v1.py" "${preflight_args[@]}"
# The destructive step: remove REMOVE_COUNT maps, clear the fetcher's
# enumerate state so removed ids can be re-discovered, and write a JSON
# summary of what was deleted.
remove_args=(
  --repo-root "$PY_DATASET_ROOT"
  --count "$REMOVE_COUNT"
  --state-db "$STATE_DB"
  --clear-enumerate-state
  --workers "$REMOVE_WORKERS"
  --summary-path "$SUMMARY"
)
"$PYTHON" "$PY_SOURCE_ROOT/python/remove_maps_v1.py" "${remove_args[@]}"
# Rebuild the "latest" snapshot so it no longer references removed maps.
rebuild_args=(--repo-root "$PY_DATASET_ROOT" --workers "$LATEST_REBUILD_WORKERS")
"$PYTHON" "$PY_SOURCE_ROOT/python/rebuild_latest_snapshot.py" "${rebuild_args[@]}"
# Post-removal validation (same checks as pre-flight, JSON output for the
# captured log).
revalidate_args=(
  --repo-root "$PY_DATASET_ROOT"
  --require-archive-file-match
  --max-data-files 10000
  --json
)
"$PYTHON" "$PY_SOURCE_ROOT/python/validate_compact_v1.py" "${revalidate_args[@]}"
# Bring the fetcher state DB in line with the post-removal repo
# (checkpoint only — no reseeding of work items).
seed_args=(--repo-root "$PY_DATASET_ROOT" --state-db "$STATE_DB" --checkpoint-only)
"$PYTHON" "$PY_SOURCE_ROOT/python/seed_fetcher_state.py" "${seed_args[@]}"
# Optionally mirror the destructive changes to the remote bucket.
# '--delete' makes each sync a true mirror, so locally removed
# archives/data disappear remotely as well.
if [ "$UPLOAD" = "1" ]; then
  echo "syncing destructive archive deletes to bucket"
  "$HF" sync archives "${BUCKET}/archives" --delete
  "$HF" sync data "${BUCKET}/data" --delete
  "$HF" sync schemas "${BUCKET}/schemas" --delete
  # Stage the state DB in a clean directory so the sync uploads exactly
  # one file under state/fetcher.  '--' guards the env-supplied STATE_DB
  # (and the literal scratch path) from being parsed as options.
  rm -rf -- .scratch/fetcher-state-upload
  mkdir -p -- .scratch/fetcher-state-upload
  # NOTE(review): if the DB uses WAL journaling, copying state.db alone
  # could miss un-checkpointed commits in the -wal sidecar.  The
  # seed_fetcher_state --checkpoint-only step above appears intended to
  # flush it first — confirm.
  cp -- "$STATE_DB" .scratch/fetcher-state-upload/state.db
  "$HF" sync .scratch/fetcher-state-upload "${BUCKET}/state/fetcher" --delete
  # Print bucket info as a final sanity check of the remote state.
  "$HF" buckets info "$BUCKET_ID"
fi
echo "finished_at=$(date -u +%Y-%m-%dT%H:%M:%SZ)"