#!/usr/bin/env bash
#SBATCH --job-name=upload
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=2
#SBATCH --mem=8G
#SBATCH --time=24:00:00
#SBATCH --output=%x_%A_%a.out
#SBATCH --error=%x_%A_%a.err

set -euo pipefail

# All settings below are defaults and can be overridden from the submitting
# environment.
ROOT_DIR="${ROOT_DIR:-/home/sf895/SignVerse-2M}"
RUNTIME_ROOT="${RUNTIME_ROOT:-/home/sf895/SignVerse-2M-runtime}"
CONDA_SH="${CONDA_SH:-/home/sf895/miniconda3/etc/profile.d/conda.sh}"
CONDA_ENV="${CONDA_ENV:-signx2}"
PIPELINE03="${PIPELINE03:-$ROOT_DIR/scripts/pipeline03_upload_to_huggingface.py}"

DATASET_DIR="${DATASET_DIR:-$RUNTIME_ROOT/dataset}"
SCRATCH_DATASET_DIR="${SCRATCH_DATASET_DIR:-/scratch/$USER/SignVerse-2M-runtime/dataset}"
RAW_VIDEO_DIR="${RAW_VIDEO_DIR:-$RUNTIME_ROOT/raw_video}"
SCRATCH_RAW_VIDEO_DIR="${SCRATCH_RAW_VIDEO_DIR:-/scratch/$USER/SignVerse-2M-runtime/raw_video}"
RAW_CAPTION_DIR="${RAW_CAPTION_DIR:-$RUNTIME_ROOT/raw_caption}"
RAW_METADATA_DIR="${RAW_METADATA_DIR:-$RUNTIME_ROOT/raw_metadata}"
ARCHIVE_DIR="${ARCHIVE_DIR:-$ROOT_DIR/archives}"
PROGRESS_JSON="${PROGRESS_JSON:-$RUNTIME_ROOT/archive_upload_progress.json}"
STATUS_JOURNAL_PATH="${STATUS_JOURNAL_PATH:-$RUNTIME_ROOT/upload_status_journal.jsonl}"
STATS_NPZ="${STATS_NPZ:-$RUNTIME_ROOT/stats.npz}"

REPO_ID="${REPO_ID:-SignerX/SignVerse-2M}"
REPO_REVISION="${REPO_REVISION:-dev}"
TARGET_BYTES="${TARGET_BYTES:-10737418240}"  # 10 GiB per archive batch
TARGET_FOLDERS="${TARGET_FOLDERS:-40}"
PARALLEL_SHARDS="${PARALLEL_SHARDS:-1}"
START_STAGGER_MIN="${START_STAGGER_MIN:-1}"
START_STAGGER_MAX="${START_STAGGER_MAX:-3}"
ALLOW_SMALL_FINAL_BATCH="${ALLOW_SMALL_FINAL_BATCH:-0}"
REQUIRE_TARGET_BYTES="${REQUIRE_TARGET_BYTES:-1}"
DRY_RUN_UPLOAD="${DRY_RUN_UPLOAD:-0}"
UPLOAD_MODE="${UPLOAD_MODE:-api}"

if [[ ! -f "$CONDA_SH" ]]; then
  echo "Missing conda init script: $CONDA_SH" >&2
  exit 1
fi

# This script must run as a SLURM array job; each array task handles one shard.
if [[ -z "${SLURM_ARRAY_TASK_ID:-}" ]]; then
  echo "SLURM_ARRAY_TASK_ID is required." >&2
  exit 1
fi
if (( PARALLEL_SHARDS < 1 )); then
  echo "PARALLEL_SHARDS must be >= 1" >&2
  exit 1
fi
if (( SLURM_ARRAY_TASK_ID < 0 || SLURM_ARRAY_TASK_ID >= PARALLEL_SHARDS )); then
  echo "SLURM_ARRAY_TASK_ID=$SLURM_ARRAY_TASK_ID is out of range for PARALLEL_SHARDS=$PARALLEL_SHARDS" >&2
  exit 1
fi

echo "[$(date '+%F %T')] upload shard=${SLURM_ARRAY_TASK_ID}/${PARALLEL_SHARDS} host=$(hostname)"

# shellcheck disable=SC1090
source "$CONDA_SH"

cmd=(
  python -u "$PIPELINE03"
  --dataset-dir "$DATASET_DIR"
  --scratch-dataset-dir "$SCRATCH_DATASET_DIR"
  --raw-video-dir "$RAW_VIDEO_DIR"
  --scratch-raw-video-dir "$SCRATCH_RAW_VIDEO_DIR"
  --raw-caption-dir "$RAW_CAPTION_DIR"
  --raw-metadata-dir "$RAW_METADATA_DIR"
  --archive-dir "$ARCHIVE_DIR"
  --progress-path "$PROGRESS_JSON"
  --stats-npz "$STATS_NPZ"
  --status-journal-path "$STATUS_JOURNAL_PATH"
  --repo-id "$REPO_ID"
  --repo-revision "$REPO_REVISION"
  --target-bytes "$TARGET_BYTES"
  --target-folders "$TARGET_FOLDERS"
  --parallel-shards "$PARALLEL_SHARDS"
  --shard-index "$SLURM_ARRAY_TASK_ID"
  --start-stagger-min "$START_STAGGER_MIN"
  --start-stagger-max "$START_STAGGER_MAX"
  --upload-mode "$UPLOAD_MODE"
)

# Append --allow-small-final-batch at most once: when it is requested
# explicitly, or when the target-bytes requirement is relaxed.
if [[ "$ALLOW_SMALL_FINAL_BATCH" == "1" || "$REQUIRE_TARGET_BYTES" != "1" ]]; then
  cmd+=(--allow-small-final-batch)
fi
if [[ "$DRY_RUN_UPLOAD" == "1" ]]; then
  cmd+=(--dry-run)
fi
cmd+=(--skip-stats-write)

# CONDA_NO_PLUGINS=true disables conda plugin loading, which can misbehave or
# add startup latency on compute nodes.
CONDA_NO_PLUGINS=true conda run -n "$CONDA_ENV" "${cmd[@]}"
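
# Usage sketch (file name and shard count below are hypothetical). The script
# expects to run as a SLURM array job with one task per shard, array indices
# covering 0..PARALLEL_SHARDS-1. Assuming it is saved as upload_archives.sbatch
# and 4 parallel shards are wanted:
#
#   PARALLEL_SHARDS=4 sbatch --array=0-3 upload_archives.sbatch
#
# sbatch propagates the submitting shell's environment by default
# (--export=ALL), so PARALLEL_SHARDS and any other overrides above reach the
# job; a dry run can be requested the same way with DRY_RUN_UPLOAD=1.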