Update startup.sh
Browse files- startup.sh +30 -12
startup.sh
CHANGED
|
@@ -1,21 +1,39 @@
|
|
| 1 |
-
#!/bin/bash
|
| 2 |
# startup.sh - BackgroundFX Pro startup script
|
| 3 |
-
#
|
| 4 |
|
| 5 |
-
|
| 6 |
-
export OMP_NUM_THREADS=4
|
| 7 |
-
export MKL_NUM_THREADS=4
|
| 8 |
|
| 9 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 10 |
export PYTHONUNBUFFERED=1
|
| 11 |
-
export GRADIO_SERVER_NAME="0.0.0.0"
|
| 12 |
-
export GRADIO_SERVER_PORT="7860"
|
| 13 |
|
| 14 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 15 |
echo "===== BackgroundFX Pro Starting ====="
|
| 16 |
echo "OMP_NUM_THREADS=$OMP_NUM_THREADS"
|
| 17 |
echo "MKL_NUM_THREADS=$MKL_NUM_THREADS"
|
| 18 |
-
echo "====================================="
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 19 |
|
| 20 |
-
#
|
| 21 |
-
python app.py
|
|
|
|
#!/usr/bin/env bash
# startup.sh - BackgroundFX Pro startup script
#
# Purpose: export every tunable environment variable BEFORE Python imports
# torch (allocator config is read at import time), print a banner of the
# effective settings, then exec the app.
#
# All exports use ${VAR:-default} so values already present in the
# environment (e.g. from Docker / the orchestrator) take precedence.

# -E: ERR trap inheritance (only useful if an ERR trap is added later),
# -e: exit on error, -u: error on unset vars, pipefail: fail whole pipeline.
set -Eeuo pipefail

# ── CUDA allocator & MatAnyone scaling (fix fragmentation / VRAM pressure) ──
export PYTORCH_CUDA_ALLOC_CONF="${PYTORCH_CUDA_ALLOC_CONF:-expandable_segments:True,max_split_size_mb:128}"
export MATANYONE_MAX_EDGE="${MATANYONE_MAX_EDGE:-640}"
export MATANYONE_TARGET_PIXELS="${MATANYONE_TARGET_PIXELS:-400000}"

# ── Threads / general ──
export OMP_NUM_THREADS="${OMP_NUM_THREADS:-4}"
export MKL_NUM_THREADS="${MKL_NUM_THREADS:-4}"
export PYTHONUNBUFFERED=1

# ── Gradio ──
export GRADIO_SERVER_NAME="${GRADIO_SERVER_NAME:-0.0.0.0}"
export GRADIO_SERVER_PORT="${GRADIO_SERVER_PORT:-7860}"

# ── Hugging Face caches (keeps downloads local to repo) ──
# NOTE(review): $PWD is the *invocation* directory, not the script's own
# directory — assumes the launcher always runs this from the repo root;
# confirm, or anchor on "$(dirname "$0")" instead.
export HF_HOME="${HF_HOME:-$PWD/checkpoints/hf}"
# TRANSFORMERS_CACHE is deprecated in recent transformers releases in favor
# of HF_HOME; kept (pointing at the same location) for older library versions.
export TRANSFORMERS_CACHE="${TRANSFORMERS_CACHE:-$HF_HOME}"

# ── Banner: echo the effective configuration for log forensics ──
echo "===== BackgroundFX Pro Starting ====="
echo "OMP_NUM_THREADS=$OMP_NUM_THREADS"
echo "MKL_NUM_THREADS=$MKL_NUM_THREADS"
echo "PYTORCH_CUDA_ALLOC_CONF=$PYTORCH_CUDA_ALLOC_CONF"
echo "MATANYONE_MAX_EDGE=$MATANYONE_MAX_EDGE"
echo "MATANYONE_TARGET_PIXELS=$MATANYONE_TARGET_PIXELS"
echo "HF_HOME=$HF_HOME"
echo "GRADIO_SERVER_NAME=$GRADIO_SERVER_NAME"
echo "GRADIO_SERVER_PORT=$GRADIO_SERVER_PORT"
echo "====================================="

# Best-effort GPU report. The original one-liner used the
# `cmd && nvidia-smi || true` anti-pattern; this explicit `if` keeps the same
# tolerance (a missing OR failing nvidia-smi never aborts startup under -e)
# while making the intent readable.
if command -v nvidia-smi >/dev/null 2>&1; then
  nvidia-smi || true  # intentional: GPU info is informational only
fi

# ── Launch ──
# exec replaces the shell so signals (e.g. SIGTERM from the container
# runtime) are delivered directly to the Python process.
exec python app.py
|