#!/usr/bin/env bash
# =============================================================================
# setup_universal.sh — Install all dependencies for the universal pipeline
# =============================================================================
# Strict mode: abort on errors, unset variables, and failed pipeline stages.
set -euo pipefail

# ANSI colours for log output (green tag, reset).
readonly GRN='\033[0;32m' NC='\033[0m'

# log MESSAGE... — print a green-tagged status line to stdout.
# printf with %b is used instead of the non-portable `echo -e`.
log() { printf '%b[setup]%b %s\n' "$GRN" "$NC" "$*"; }
# ── System packages ───────────────────────────────────────────────────────────
log "Installing system packages..."

# Everything the pipeline shells out to, plus Pillow's native dependencies.
readonly -a SYSTEM_PACKAGES=(
  aria2
  parallel
  pigz
  pv
  jq
  bc
  curl wget ca-certificates
  libgl1 libglib2.0-0   # needed by Pillow on headless servers
)
sudo apt-get update -qq
sudo apt-get install -y --no-install-recommends "${SYSTEM_PACKAGES[@]}"

# GNU Parallel prints a citation notice until this marker file exists.
mkdir -p "$HOME/.parallel"
touch "$HOME/.parallel/will-cite"
# ── Python packages ───────────────────────────────────────────────────────────
log "Installing Python packages..."
pip install --upgrade pip --quiet
# Fix: pyarrow was previously listed twice in this command; it appears once now.
pip install --quiet \
  datasets \
  diffusers \
  transformers \
  accelerate \
  torch torchvision \
  pyarrow \
  Pillow \
  tqdm \
  huggingface_hub \
  safetensors \
  webdataset \
  orjson \
  requests
# ββ HuggingFace login βββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
HF_TOKEN_FILE="$HOME/.cache/huggingface/token"
if [[ ! -f "$HF_TOKEN_FILE" ]]; then
log "No HuggingFace token found. Run: huggingface-cli login"
else
log "HuggingFace token found β"
fi
# ── Directory scaffold ────────────────────────────────────────────────────────
# BASE_DIR may be overridden by the environment; default to the workspace path.
BASE_DIR="${BASE_DIR:-/workspace/hem/dataset_output}"
log "Creating base directory structure: $BASE_DIR"

# Fixed layout shared by every pipeline stage.
mkdir -p \
  "${BASE_DIR}/images/original" \
  "${BASE_DIR}/captions/shards" \
  "${BASE_DIR}/metadata/processing_logs/worker_logs" \
  "${BASE_DIR}/raw_shards" \
  "${BASE_DIR}/logs"

# Per-resolution image and VAE-latent directories.
for resolution in 256 512 1024; do
  mkdir -p \
    "${BASE_DIR}/images/${resolution}x${resolution}" \
    "${BASE_DIR}/latents/sd-vae-${resolution}"
done
# Seed empty files
CAPS="${BASE_DIR}/captions/captions.json"
if [[ ! -f "$CAPS" ]]; then
  printf '{}\n' > "$CAPS"
fi

META="${BASE_DIR}/metadata/dataset_info.json"
# Write the default manifest only on first run; never clobber existing state.
# Quoted delimiter: the body contains no expansions, so output is unchanged.
[[ -f "$META" ]] || cat > "$META" <<'JSON'
{
  "hf_dataset": null,
  "hf_split": "train",
  "resolutions": [256, 512],
  "vae_model": "stabilityai/sd-vae-ft-ema",
  "processed_count": 0,
  "failed_count": 0,
  "last_run": null
}
JSON
# ── Verify format_detector is importable ──────────────────────────────────────
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if [[ -f "$SCRIPT_DIR/format_detector.py" ]]; then
  log "Verifying format_detector.py..."
  # Bug fix: the import must resolve against this script's directory, not the
  # caller's CWD — otherwise the check fails whenever the script is invoked
  # from another directory even though format_detector.py exists.
  PYTHONPATH="$SCRIPT_DIR${PYTHONPATH:+:$PYTHONPATH}" \
    python3 -c "from format_detector import detect; print(' format_detector OK')" \
    || echo " [warn] Could not import format_detector — ensure it is in the same folder" >&2
fi
log ""
log "Setup complete β"
log ""
log "Quick start:"
log " huggingface-cli login"
log " HF_DATASET=LLAAMM/text2image1m bash run_pipeline_universal.sh"
log ""
log "Other examples:"
log " HF_DATASET=laion/laion400m JOBS=8 CUDA_DEVICES=0,1,2,3 bash run_pipeline_universal.sh"
log " HF_DATASET=poloclub/diffusiondb RESOLUTIONS='256' NO_LATENTS=1 bash run_pipeline_universal.sh"
log " HF_DATASET=timbrooks/instructpix2pix-clip-filtered bash run_pipeline_universal.sh"
log ""
log "Test run (3 shards, 100 samples each, no VAE):"
log " HF_DATASET=your/dataset MAX_SHARDS=3 MAX_SAMPLES=100 NO_LATENTS=1 bash run_pipeline_universal.sh"