Spaces:
Running
Running
Delete start_gradio.sh
Browse files- start_gradio.sh +0 -97
start_gradio.sh
DELETED
|
@@ -1,97 +0,0 @@
|
|
| 1 |
-
#!/bin/bash
# Startup script: optionally download the model, then launch the Gradio app
# (which drives vLLM.LLM internally).
set -euo pipefail

# Configuration — every value can be overridden via the environment.
MODEL_REPO="${MODEL_REPO:-stepfun-ai/Step-Audio-2-mini-Think}"
MODEL_DIR="${MODEL_DIR:-/tmp/app/models/Step-Audio-2-mini-Think}"
PRELOAD_MODEL="${PRELOAD_MODEL:-1}"
GRADIO_PORT="${GRADIO_PORT:-7860}"
HOST="${HOST:-0.0.0.0}"
TENSOR_PARALLEL_SIZE="${TENSOR_PARALLEL_SIZE:-4}"
MAX_MODEL_LEN="${MAX_MODEL_LEN:-8192}"
GPU_MEMORY_UTILIZATION="${GPU_MEMORY_UTILIZATION:-0.9}"
TOKENIZER_MODE="${TOKENIZER_MODE:-step_audio_2}"
SERVED_MODEL_NAME="${SERVED_MODEL_NAME:-step-audio-2-mini-think}"

# Point the Hugging Face caches at a writable location.
export HF_HOME="${HF_HOME:-/tmp/hf_cache}"
export XDG_CACHE_HOME="${XDG_CACHE_HOME:-/tmp/hf_cache}"

echo "=========================================="
echo "Step Audio 2 Gradio 启动脚本"
echo "MODEL_REPO: $MODEL_REPO"
echo "MODEL_DIR : $MODEL_DIR"
echo "PRELOAD_MODEL: $PRELOAD_MODEL"
echo "HOST/PORT: $HOST:$GRADIO_PORT"
echo "TP: $TENSOR_PARALLEL_SIZE | MAX_LEN: $MAX_MODEL_LEN"
echo "缓存目录: $HF_HOME"
echo "=========================================="
|
| 29 |
-
|
| 30 |
-
#######################################
# Download $MODEL_REPO into $MODEL_DIR, preferring the modern `hf` CLI and
# falling back to the legacy `huggingface-cli`.
# Globals:   MODEL_REPO, MODEL_DIR, HF_HOME (all read)
# Outputs:   progress messages to stdout; error to stderr on failure
# Returns:   0 on success; 1 when no downloader CLI is available
#######################################
download_model() {
  # Make sure the target and cache directories exist before downloading.
  mkdir -p "$MODEL_DIR"
  mkdir -p "$HF_HOME"

  echo "[Download] 开始下载模型到: $MODEL_DIR"
  echo "[Download] 缓存目录: $HF_HOME"

  # Prefer the `hf` command (recommended by newer huggingface_hub releases).
  if command -v hf >/dev/null 2>&1; then
    echo "[Download] 使用 hf download 命令"
    hf download "$MODEL_REPO" --local-dir "$MODEL_DIR" --cache-dir "$HF_HOME"
  elif command -v huggingface-cli >/dev/null 2>&1; then
    echo "[Download] 使用 huggingface-cli"
    huggingface-cli download "$MODEL_REPO" --local-dir "$MODEL_DIR" --local-dir-use-symlinks False
  else
    # FIX: the original fell through silently here, returning success with no
    # model downloaded; later steps then failed confusingly. Fail loudly now.
    echo "[Download] 错误: 未找到 hf 或 huggingface-cli,无法下载模型" >&2
    return 1
  fi
}
|
| 47 |
-
|
| 48 |
-
# Decide where the model is loaded from: a pre-downloaded local directory
# (PRELOAD_MODEL=1) or the bare repo name. Exports MODEL_PATH either way.
if [[ "$PRELOAD_MODEL" == "1" ]]; then
  # The local copy counts as complete only when both key metadata files exist.
  if [[ -d "$MODEL_DIR" && -f "$MODEL_DIR/config.json" && -f "$MODEL_DIR/model.safetensors.index.json" ]]; then
    echo "检测到本地模型: $MODEL_DIR"
    echo "模型文件检查通过"
  else
    echo "模型未就绪或文件不完整,开始下载..."
    download_model
  fi
  export MODEL_PATH="$MODEL_DIR"
else
  echo "跳过预下载,直接使用仓库名称加载"
  export MODEL_PATH="${MODEL_PATH:-$MODEL_REPO}"
fi
|
| 62 |
-
|
| 63 |
-
# 验证下载结果
|
| 64 |
-
# When the model was pre-downloaded, print a quick sanity report of the model
# directory and its key files. Purely informational: a missing file is
# reported with ✗ but does not abort the script.
if [[ "$PRELOAD_MODEL" == "1" ]]; then
  echo "=== 模型文件验证 ==="
  # FIX: under `set -euo pipefail` this pipeline could kill the whole script —
  # `head` closing the pipe early sends `ls` a SIGPIPE, and a missing
  # directory makes `ls` fail; either way pipefail turns it fatal. Guard it.
  ls -la "$MODEL_DIR" 2>/dev/null | head -10 || true
  if [[ -f "$MODEL_DIR/config.json" ]]; then
    echo "✓ config.json 存在"
  else
    echo "✗ config.json 缺失"
  fi
  if [[ -f "$MODEL_DIR/model.safetensors.index.json" ]]; then
    echo "✓ model.safetensors.index.json 存在"
  else
    echo "✗ model.safetensors.index.json 缺失"
  fi
  echo "==================="
fi
|
| 79 |
-
|
| 80 |
-
# Announce the resolved model path, then make sure a Python interpreter is
# available before handing control to the app.
echo "模型路径: ${MODEL_PATH}"
echo "启动 Gradio..."

PYTHON_BIN="${PYTHON_BIN:-python3}"
command -v "$PYTHON_BIN" >/dev/null 2>&1 || {
  echo "未找到 Python 解释器(当前设置: $PYTHON_BIN),请确认镜像已安装 python3。"
  exit 1
}
|
| 88 |
-
|
| 89 |
-
# Hand off to the Gradio app. `exec` replaces this shell with the Python
# process so container signals (SIGTERM/SIGINT) reach the app directly and no
# orphan shell lingers as PID 1's child. Nothing ran after this line before,
# so the change is behavior-compatible.
exec "$PYTHON_BIN" app.py \
  --host "$HOST" \
  --port "$GRADIO_PORT" \
  --model "$MODEL_PATH" \
  --tensor-parallel-size "$TENSOR_PARALLEL_SIZE" \
  --max-model-len "$MAX_MODEL_LEN" \
  --gpu-memory-utilization "$GPU_MEMORY_UTILIZATION" \
  --tokenizer-mode "$TOKENIZER_MODE" \
  --served-model-name "$SERVED_MODEL_NAME"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|