#!/bin/bash
# Abort on any error, on unset variables, and on failures in any pipeline stage.
set -euo pipefail

# Script-wide constants — marked readonly so later steps cannot clobber them.
readonly ENV_NAME=spec                                   # conda environment name
readonly TUNA=https://pypi.tuna.tsinghua.edu.cn/simple   # Tsinghua PyPI mirror
readonly CONDA=/workspace/miniconda3/bin/conda           # conda executable path
echo "=== 1/6 创建 conda 环境 ==="
# Create the environment non-interactively (-y), then activate it in THIS shell
# via conda's hook script. Expansions are quoted (ShellCheck SC2086).
"$CONDA" create -n "$ENV_NAME" python=3.11 -y
# shellcheck disable=SC1091 -- hook lives outside the repo, not lintable here
source /workspace/miniconda3/etc/profile.d/conda.sh
conda activate "$ENV_NAME"
echo "=== 2/6 安装 PyTorch ==="
# Bring build tooling up to date first, then install the pinned CUDA 12.8
# PyTorch stack from the official wheel index.
pip install -U pip setuptools wheel
torch_pkgs=(torch==2.9.1 torchvision==0.24.1 torchaudio==2.9.1)
pip install "${torch_pkgs[@]}" --index-url https://download.pytorch.org/whl/cu128
echo "=== 3/6 安装核心依赖 ==="
# Core runtime dependencies, pulled from the TUNA mirror. $TUNA is quoted
# (ShellCheck SC2086). transformers and sglang are pinned; the rest float.
pip install -i "$TUNA" \
  transformers==4.57.1 \
  accelerate \
  datasets \
  tqdm \
  peft \
  safetensors \
  pydantic \
  numpy \
  typing_extensions \
  sglang==0.5.6 \
  sgl-kernel \
  yunchang
echo "=== 4/6 安装工具包 ==="
# Build/runtime helper packages (ninja is needed to compile flash-attn below).
pip install -i "$TUNA" \
  packaging \
  ninja \
  psutil
echo "=== 5/6 安装可选依赖 ==="
# flash-attn compiles CUDA kernels and may fail to build on some hosts; warn
# on stderr and continue — the code falls back to flex_attention at runtime.
pip install flash-attn --no-build-isolation -i "$TUNA" \
  || echo "WARNING: flash-attn 安装失败,将 fallback 到 flex_attention" >&2
# wandb is optional telemetry; a failed install is deliberately ignored.
pip install wandb -i "$TUNA" || true
echo "=== 6/6 安装 Specforge ==="
# Editable install of the local checkout; --no-deps because all dependencies
# were pinned explicitly above. Explicit cd guard (ShellCheck SC2164).
cd /workspace/hanrui/syxin_old/Specforge || exit 1
pip install -e . --no-deps
echo ""
echo "=== 验证 ==="
# Smoke-test every installed package. The here-doc delimiter is quoted ('PY')
# so the shell passes the Python source verbatim — nothing inside can be
# accidentally expanded, unlike the previous double-quoted `python -c "…"`.
python - <<'PY'
import torch
print(f'PyTorch: {torch.__version__}')
print(f'CUDA: {torch.cuda.is_available()}, {torch.version.cuda}')
from torch.nn.attention.flex_attention import flex_attention
print('flex_attention: OK')
import transformers; print(f'transformers: {transformers.__version__}')
import accelerate; print(f'accelerate: {accelerate.__version__}')
import datasets; print('datasets: OK')
import peft; print(f'peft: {peft.__version__}')
import yunchang; print('yunchang: OK')
import sglang; print('sglang: OK')
import safetensors; print('safetensors: OK')
import pydantic; print('pydantic: OK')
print()
print('All good!')
PY
|