File size: 2,898 Bytes
2bc7a9b
 
 
be96493
 
 
2bc7a9b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
be96493
 
2bc7a9b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
a472ea0
 
2bc7a9b
 
 
a472ea0
 
2bc7a9b
 
 
 
 
 
a472ea0
 
2bc7a9b
 
a472ea0
 
2bc7a9b
 
 
 
a472ea0
 
2bc7a9b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
#!/usr/bin/env bash
# remove-maps-v1: destructively remove a batch of maps from the local
# dataset, rebuild the "latest" snapshot, re-validate, and (when UPLOAD=1)
# sync the deletions to the HF bucket.  All tunables are overridable via
# environment variables; see the defaults below.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/common.sh"
prepare_dataset_root

# Fail fast with a clear message if common.sh / prepare_dataset_root did
# not provide the variables the rest of the script depends on.  Under
# plain `set -u` a missing one would only surface midway through the run,
# after some steps had already executed.
: "${PYTHON:?expected from common.sh}"
: "${HF:?expected from common.sh}"
: "${SOURCE_ROOT:?expected from common.sh}"
: "${DATASET_ROOT:?expected from common.sh / prepare_dataset_root}"
: "${PY_SOURCE_ROOT:?expected from common.sh}"
: "${PY_DATASET_ROOT:?expected from common.sh}"

BUCKET="${BUCKET:-hf://buckets/lekdan/osu-everything}"
BUCKET_ID="${BUCKET_ID:-lekdan/osu-everything}"
STATE_DB="${STATE_DB:-.fetcher/state.db}"
REMOVE_COUNT="${REMOVE_COUNT:-1000}"
REMOVE_WORKERS="${REMOVE_WORKERS:-4}"
UPLOAD="${UPLOAD:-1}"
LATEST_REBUILD_WORKERS="${LATEST_REBUILD_WORKERS:-8}"

mkdir -p logs .scratch
LOG="logs/remove-maps-v1-$(date -u +%Y%m%dT%H%M%SZ).log"
SUMMARY=".scratch/remove-maps-v1-summary.json"
# Mirror all stdout/stderr of the remainder of the run into the
# timestamped log file.
exec > >(tee -a "$LOG") 2>&1

# Record the run parameters at the top of the log as key=value lines.
printf '%s\n' \
  "started_at=$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
  "source_root=${SOURCE_ROOT}" \
  "dataset_root=${DATASET_ROOT}" \
  "bucket=${BUCKET}" \
  "remove_count=${REMOVE_COUNT}" \
  "remove_workers=${REMOVE_WORKERS}" \
  "state_db=${STATE_DB}" \
  "upload=${UPLOAD}"

# Refuse to run a destructive removal while any other pipeline stage is
# active.  The [b]racketed first letters keep pgrep from matching its own
# command line.  pgrep exits 0 only when it found a match, so the guard
# trips exactly when at least one process matched.
if active="$(pgrep -af '[o]su_fetcher|[o]su_indexer|[i]ngest_osz|[r]ebuild_latest_snapshot|[c]ompact_metadata_v1|[u]pdate_maps_v1|[h]f( sync| buckets sync)')"; then
  echo "refusing to remove maps while another pipeline process is active:" >&2
  echo "$active" >&2
  exit 65
fi

# Transfer tuning for the Hugging Face hub / Xet clients used by the
# `hf sync` steps below.  NOTE(review): the semantics of these knobs are
# defined by the hf tooling, not this script — the concurrency/retry
# values look tuned for this dataset's upload sizes; confirm against the
# hf_transfer / Xet client documentation before changing them.
export HF_HUB_ENABLE_HF_TRANSFER=1
export HF_XET_HIGH_PERFORMANCE=1
export HF_XET_CLIENT_AC_INITIAL_UPLOAD_CONCURRENCY="${HF_XET_CLIENT_AC_INITIAL_UPLOAD_CONCURRENCY:-4}"
export HF_XET_CLIENT_AC_MAX_UPLOAD_CONCURRENCY="${HF_XET_CLIENT_AC_MAX_UPLOAD_CONCURRENCY:-8}"
export HF_XET_CLIENT_RETRY_MAX_DURATION="${HF_XET_CLIENT_RETRY_MAX_DURATION:-600}"

# Step 1: validate the compact-v1 layout BEFORE any destructive change, so
# a pre-existing inconsistency aborts the run here (set -e) rather than
# being compounded by the removal.
"$PYTHON" "$PY_SOURCE_ROOT/python/validate_compact_v1.py" \
  --repo-root "$PY_DATASET_ROOT" \
  --require-archive-file-match \
  --max-data-files 10000

# Step 2: the destructive removal itself.  Writes a machine-readable
# summary to $SUMMARY; --clear-enumerate-state presumably resets the
# fetcher's enumeration cursor in $STATE_DB — confirm in remove_maps_v1.py.
"$PYTHON" "$PY_SOURCE_ROOT/python/remove_maps_v1.py" \
  --repo-root "$PY_DATASET_ROOT" \
  --count "$REMOVE_COUNT" \
  --state-db "$STATE_DB" \
  --clear-enumerate-state \
  --workers "$REMOVE_WORKERS" \
  --summary-path "$SUMMARY"

# Step 3: rebuild the "latest" snapshot so it no longer references the
# removed maps.
"$PYTHON" "$PY_SOURCE_ROOT/python/rebuild_latest_snapshot.py" \
  --repo-root "$PY_DATASET_ROOT" \
  --workers "$LATEST_REBUILD_WORKERS"

# Step 4: re-validate AFTER the removal; --json makes this second report
# machine-readable in the log.
"$PYTHON" "$PY_SOURCE_ROOT/python/validate_compact_v1.py" \
  --repo-root "$PY_DATASET_ROOT" \
  --require-archive-file-match \
  --max-data-files 10000 \
  --json

# Step 5: refresh the fetcher checkpoint in the state DB to match the
# post-removal repo state (checkpoint only — no full reseed).
"$PYTHON" "$PY_SOURCE_ROOT/python/seed_fetcher_state.py" \
  --repo-root "$PY_DATASET_ROOT" \
  --state-db "$STATE_DB" \
  --checkpoint-only

if [ "$UPLOAD" = "1" ]; then
  echo "syncing destructive archive deletes to bucket"
  # --delete propagates local removals to the bucket: this is the
  # destructive half of the run on the remote side.
  "$HF" sync archives "${BUCKET}/archives" --delete
  "$HF" sync data "${BUCKET}/data" --delete
  "$HF" sync schemas "${BUCKET}/schemas" --delete

  # Stage the fetcher state DB in a clean directory so the sync uploads
  # exactly one snapshot (the --delete flag removes anything stale on the
  # remote side).
  rm -rf .scratch/fetcher-state-upload
  mkdir -p .scratch/fetcher-state-upload
  cp "$STATE_DB" .scratch/fetcher-state-upload/state.db
  # If the SQLite database runs in WAL mode, un-checkpointed writes live
  # in the -wal sidecar; copying only the main file would upload a stale
  # snapshot.  Copy the sidecars too when present (no-op otherwise).
  for sidecar in -wal -shm; do
    if [ -f "${STATE_DB}${sidecar}" ]; then
      cp "${STATE_DB}${sidecar}" ".scratch/fetcher-state-upload/state.db${sidecar}"
    fi
  done
  "$HF" sync .scratch/fetcher-state-upload "${BUCKET}/state/fetcher" --delete

  # Surface the bucket's post-sync stats in the log.
  "$HF" buckets info "$BUCKET_ID"
fi

echo "finished_at=$(date -u +%Y-%m-%dT%H:%M:%SZ)"