#!/usr/bin/env bash #SBATCH --job-name=dwpose #SBATCH --nodes=1 #SBATCH --ntasks=1 #SBATCH --cpus-per-task=8 #SBATCH --gres=gpu:1 #SBATCH --mem=32G #SBATCH --time=24:00:00 #SBATCH --output=%x_%A_%a.out #SBATCH --error=%x_%A_%a.err set -euo pipefail ROOT_DIR="${ROOT_DIR:-/home/sf895/SignVerse-2M}" RUNTIME_ROOT="${RUNTIME_ROOT:-/home/sf895/SignVerse-2M-runtime}" STATE_ROOT="${STATE_ROOT:-/home/sf895/SignVerse-2M-runtime}" CONDA_SH="${CONDA_SH:-/home/sf895/miniconda3/etc/profile.d/conda.sh}" CONDA_ENV="${CONDA_ENV:-signx2}" RAW_VIDEO_DIR="${RAW_VIDEO_DIR:-$RUNTIME_ROOT/raw_video}" SCRATCH_RAW_VIDEO_DIR="${SCRATCH_RAW_VIDEO_DIR:-/scratch/$USER/SignVerse-2M-runtime/raw_video}" DATASET_DIR="${DATASET_DIR:-$RUNTIME_ROOT/dataset}" SCRATCH_DATASET_DIR="${SCRATCH_DATASET_DIR:-/scratch/$USER/SignVerse-2M-runtime/dataset}" STATS_NPZ="${STATS_NPZ:-$RUNTIME_ROOT/stats.npz}" STATUS_JOURNAL_PATH="${STATUS_JOURNAL_PATH:-$RUNTIME_ROOT/upload_status_journal.jsonl}" PIPELINE02="${PIPELINE02:-$ROOT_DIR/scripts/pipeline02_extract_dwpose_from_video.py}" FPS="${FPS:-24}" TMP_ROOT="${TMP_ROOT:-${SLURM_TMPDIR:-/tmp}}" FORCE_PROCESS="${FORCE_PROCESS:-0}" DELETE_SOURCE_ON_SUCCESS="${DELETE_SOURCE_ON_SUCCESS:-0}" CLAIM_DIR="${CLAIM_DIR:-$STATE_ROOT/slurm/state/claims}" RETRY_DIR="${RETRY_DIR:-$STATE_ROOT/slurm/state/gpu_init_retries}" MAX_GPU_INIT_RETRIES="${MAX_GPU_INIT_RETRIES:-3}" OPTIMIZED_MODE="${OPTIMIZED_MODE:-1}" OPTIMIZED_PROVIDER="${OPTIMIZED_PROVIDER:-cuda}" OPTIMIZED_FRAME_BATCH_SIZE="${OPTIMIZED_FRAME_BATCH_SIZE:-8}" OPTIMIZED_DETECT_RESOLUTION="${OPTIMIZED_DETECT_RESOLUTION:-512}" OPTIMIZED_FRAME_STRIDE="${OPTIMIZED_FRAME_STRIDE:-1}" OPTIMIZED_IO_BINDING="${OPTIMIZED_IO_BINDING:-1}" OPTIMIZED_GPU_DETECTOR_POSTPROCESS="${OPTIMIZED_GPU_DETECTOR_POSTPROCESS:-1}" OPTIMIZED_GPU_POSE_PREPROCESS="${OPTIMIZED_GPU_POSE_PREPROCESS:-0}" VIDEOS_PER_JOB="${VIDEOS_PER_JOB:-5}" MANIFEST="${MANIFEST:-${1:-}}" if [[ -z "$MANIFEST" ]]; then echo "MANIFEST is required (env var or first positional 
arg)." >&2 exit 1 fi if [[ ! -f "$MANIFEST" ]]; then echo "Manifest not found: $MANIFEST" >&2 exit 1 fi if [[ -z "${SLURM_ARRAY_TASK_ID:-}" ]]; then echo "SLURM_ARRAY_TASK_ID is required." >&2 exit 1 fi if [[ ! -f "$CONDA_SH" ]]; then echo "Missing conda init script: $CONDA_SH" >&2 exit 1 fi mapfile -t ALL_VIDEO_IDS < <(sed -n "$((SLURM_ARRAY_TASK_ID * VIDEOS_PER_JOB + 1)),$(((SLURM_ARRAY_TASK_ID + 1) * VIDEOS_PER_JOB))p" "$MANIFEST") if [[ "${#ALL_VIDEO_IDS[@]}" -eq 0 ]]; then echo "No video ids found for task index ${SLURM_ARRAY_TASK_ID} in manifest $MANIFEST" >&2 exit 1 fi mkdir -p "$CLAIM_DIR" "$RETRY_DIR" cleanup_claims() { local video_id for video_id in "${ALL_VIDEO_IDS[@]}"; do rm -f "$CLAIM_DIR/${video_id}.claim" done } trap cleanup_claims EXIT export OMP_NUM_THREADS="${SLURM_CPUS_PER_TASK:-1}" export MKL_NUM_THREADS="${SLURM_CPUS_PER_TASK:-1}" echo "[$(date '+%F %T')] job=${SLURM_JOB_ID:-na} task=${SLURM_ARRAY_TASK_ID} host=$(hostname) gpu=${CUDA_VISIBLE_DEVICES:-unset} videos=${#ALL_VIDEO_IDS[@]} first_video=${ALL_VIDEO_IDS[0]}" # shellcheck disable=SC1090 source "$CONDA_SH" CONDA_ENV_PREFIX="$(conda env list | awk '$1 == env {print $NF}' env="$CONDA_ENV")" if [[ -z "$CONDA_ENV_PREFIX" ]]; then echo "Unable to resolve conda env prefix for $CONDA_ENV" >&2 exit 1 fi export LD_LIBRARY_PATH="$CONDA_ENV_PREFIX/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}" export PATH="$CONDA_ENV_PREFIX/bin:$PATH" LIBSTDCXX_PATH="$CONDA_ENV_PREFIX/lib/libstdc++.so.6" LIBGCC_PATH="$CONDA_ENV_PREFIX/lib/libgcc_s.so.1" LD_PRELOAD_VALUE="" if [[ -f "$LIBSTDCXX_PATH" ]]; then LD_PRELOAD_VALUE="$LIBSTDCXX_PATH" fi if [[ -f "$LIBGCC_PATH" ]]; then LD_PRELOAD_VALUE="${LD_PRELOAD_VALUE:+$LD_PRELOAD_VALUE:}$LIBGCC_PATH" fi export LD_PRELOAD="${LD_PRELOAD_VALUE}${LD_PRELOAD:+:$LD_PRELOAD}" echo "Using conda env prefix=$CONDA_ENV_PREFIX" echo "Using LD_PRELOAD=$LD_PRELOAD" gpu_init_retry_state() { local video_id="$1" local attempts="$2" local host_name="$3" local gpu_id="$4" local reason="$5" 
# NOTE(review): this chunk appears truncated mid-file. `cat > "$retry_state_path" <&1`
# is the residue of a here-doc (`<<EOF ... EOF`) whose body — along with the end of
# gpu_init_retry_state, the helper functions should_retry_gpu_init_failure /
# get_retry_attempts / mark_retry_exhausted, and the per-video claim/processing loop
# that defines TMP_LOG, VIDEO_ID, CLAIM_PATH, RETRY_STATE_PATH and overall_status —
# is not visible here. Recover the elided section before editing; left byte-identical.
#
# Visible tail logic (grounded in the code below): after the pipeline command, its
# exit code is taken from PIPESTATUS[0] (the tee stage would otherwise mask it).
# On failure: if the log matches a GPU-init failure, bump the per-video retry count,
# persist it via gpu_init_retry_state, and either mark the video skipped once
# MAX_GPU_INIT_RETRIES is reached (releasing its claim, continuing the loop) or
# break out so the video is resubmitted later; non-GPU failures break immediately.
# On success: the retry-state and claim files are removed. The script exits with
# the last recorded overall_status.
local retry_state_path="$RETRY_DIR/${video_id}.state" cat > "$retry_state_path" <&1 | tee "$TMP_LOG" cmd_status=${PIPESTATUS[0]} set -e if [[ "$cmd_status" -ne 0 ]]; then npz_dir="$DATASET_DIR/$VIDEO_ID/npz" if [[ ! -d "$npz_dir" && -d "$SCRATCH_DATASET_DIR/$VIDEO_ID/npz" ]]; then npz_dir="$SCRATCH_DATASET_DIR/$VIDEO_ID/npz" fi if should_retry_gpu_init_failure "$TMP_LOG" "$npz_dir"; then attempts="$(get_retry_attempts "$VIDEO_ID")" attempts="$((attempts + 1))" last_reason="$(tail -n 80 "$TMP_LOG" | tr '\n' ' ' | sed 's/[[:space:]]\+/ /g' | cut -c1-1200)" gpu_init_retry_state "$VIDEO_ID" "$attempts" "$(hostname)" "${CUDA_VISIBLE_DEVICES:-unset}" "$last_reason" if [[ "$attempts" -ge "$MAX_GPU_INIT_RETRIES" ]]; then echo "GPU init failed on multiple GPUs; marking $VIDEO_ID as skipped after $attempts attempts." >&2 mark_retry_exhausted "$VIDEO_ID" "$attempts" "$last_reason" rm -f "$CLAIM_PATH" continue fi echo "GPU init failure for $VIDEO_ID on host=$(hostname) gpu=${CUDA_VISIBLE_DEVICES:-unset}; retry attempt $attempts/$MAX_GPU_INIT_RETRIES will be resubmitted later." >&2 overall_status="$cmd_status" break fi overall_status="$cmd_status" break fi rm -f "$RETRY_STATE_PATH" "$CLAIM_PATH" done exit "$overall_status"