File size: 3,380 Bytes
8691f4b |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 |
@echo off
REM ============================================================================
REM CUDA setup script for the Ursa Minor Smashed model (Windows).
REM
REM Steps:
REM   1. Verify NVIDIA drivers (nvidia-smi) are present.
REM   2. Create and activate a "venv-cuda" virtual environment.
REM   3. Install a CUDA-enabled PyTorch build (user picks cu118/cu121).
REM   4. Install remaining Python dependencies.
REM   5. Verify torch can actually see the GPU, and that the model file exists.
REM
REM Exits with code 1 on any fatal failure.
REM ============================================================================

echo [*] Setting up CUDA environment for Ursa Minor Smashed model...

REM --- Check if an NVIDIA GPU / driver stack is available ---
nvidia-smi >nul 2>&1
if errorlevel 1 (
    echo [ERROR] nvidia-smi not found. Make sure NVIDIA drivers are installed.
    pause
    exit /b 1
)

echo [*] Checking GPU information...
nvidia-smi

REM CUDA toolkit version detection is manual on Windows.
echo [*] Please ensure you have CUDA 11.8 or 12.1 installed

REM --- Create virtual environment (abort if python is missing/broken) ---
echo [*] Creating virtual environment...
python -m venv venv-cuda
if errorlevel 1 (
    echo [ERROR] Failed to create virtual environment. Is Python on PATH?
    pause
    exit /b 1
)

REM --- Activate virtual environment ---
REM NOTE: this must be a single echo line; the message may not sit on its own
REM line or cmd will try to execute it as a command.
echo [*] Activating virtual environment...
call venv-cuda\Scripts\activate
if errorlevel 1 (
    echo [ERROR] Failed to activate virtual environment.
    pause
    exit /b 1
)

REM --- Upgrade pip ---
python -m pip install --upgrade pip

REM --- Install PyTorch with CUDA support ---
echo [*] Installing CUDA requirements...
echo This may take a few minutes as PyTorch CUDA packages are large...

REM Ask the user which CUDA toolkit their driver supports. There is no
REM auto-detection on Windows; the fallback choice simply installs cu118.
echo.
echo Please select your CUDA version:
echo 1. CUDA 11.8
echo 2. CUDA 12.1
echo 3. Default - CUDA 11.8
set /p cuda_choice=Enter choice (1-3, default 3):

if "%cuda_choice%"=="1" (
    echo Installing PyTorch for CUDA 11.8...
    pip install torch torchaudio torchvision --extra-index-url https://download.pytorch.org/whl/cu118
) else if "%cuda_choice%"=="2" (
    echo Installing PyTorch for CUDA 12.1...
    pip install torch torchaudio torchvision --extra-index-url https://download.pytorch.org/whl/cu121
) else (
    REM The ')' must be escaped with '^' or it would close this block early.
    echo Installing PyTorch for CUDA 11.8 (default^)...
    pip install torch torchaudio torchvision --extra-index-url https://download.pytorch.org/whl/cu118
)

REM --- Install remaining dependencies ---
REM Version specifiers MUST be quoted: an unquoted '>' is output redirection
REM in cmd, which would strip the version pins and create junk files like
REM '=1.24.0' in the current directory.
echo [*] Installing remaining dependencies...
pip install "numpy>=1.24.0" "tiktoken>=0.5.0" "tqdm>=4.65.0"
pip install "gguf>=0.6.0" "sentencepiece>=0.1.99" "safetensors>=0.4.0" "psutil>=5.8.0"
pip install "pynvml>=11.4.1" "nvidia-ml-py3>=7.352.0"
pip install "matplotlib>=3.7.0" "jupyter>=1.0.0"

REM --- Test CUDA availability (exit code 1 from python if torch has no GPU) ---
echo [*] Testing CUDA setup...
python -c "import torch; print(f'PyTorch version: {torch.__version__}'); print(f'CUDA available: {torch.cuda.is_available()}'); import sys; sys.exit(0 if torch.cuda.is_available() else 1)"
if errorlevel 1 (
    echo [ERROR] CUDA not available in PyTorch installation
    pause
    exit /b 1
)
python -c "import torch; print(f'CUDA device count: {torch.cuda.device_count()}'); print(f'Current CUDA device: {torch.cuda.current_device()}'); print(f'CUDA device name: {torch.cuda.get_device_name()}'); print(f'CUDA version: {torch.version.cuda}')"

REM --- Verify model file exists (warning only, not fatal) ---
if exist "model_optimized.pt" (
    echo [OK] Model file found: model_optimized.pt
) else (
    echo [WARN] model_optimized.pt not found in current directory
    echo Make sure you have the model file in the same directory as this script
)

echo.
echo [DONE] CUDA setup complete!
echo.
echo Usage Instructions:
echo To activate CUDA environment:
echo     venv-cuda\Scripts\activate
echo.
echo To run CUDA inference:
echo     python inference_cuda.py --prompt "Your prompt here"
echo.
echo To run CUDA chat:
echo     python chat_cuda.py
echo.
echo To run CUDA benchmark:
echo     python benchmark_cuda.py
echo.
echo Test your setup:
echo     python -c "import torch; print('CUDA available:', torch.cuda.is_available())"
pause