@echo off
REM CUDA setup script for the Ursa Minor Smashed model (Windows).
REM Verifies NVIDIA drivers, creates a virtual environment, installs a
REM CUDA-enabled PyTorch build plus remaining dependencies.

echo [SETUP] Setting up CUDA environment for Ursa Minor Smashed model...

REM Abort early when NVIDIA drivers (and thus nvidia-smi) are missing.
nvidia-smi >nul 2>&1
if errorlevel 1 (
    echo [ERROR] nvidia-smi not found. Make sure NVIDIA drivers are installed.
    pause
    exit /b 1
)

echo [INFO] Checking GPU information...
nvidia-smi

REM CUDA toolkit detection is manual on Windows.
echo [NOTE] Please ensure you have CUDA 11.8 or 12.1 installed

REM Create an isolated virtual environment; fail fast if Python is absent.
echo [INFO] Creating virtual environment...
python -m venv venv-cuda
if errorlevel 1 (
    echo [ERROR] Failed to create virtual environment. Is Python on PATH?
    pause
    exit /b 1
)

REM Activate virtual environment
echo [OK] Activating virtual environment...
call venv-cuda\Scripts\activate

REM Upgrade pip inside the venv.
python -m pip install --upgrade pip

REM Install CUDA requirements
echo [INFO] Installing CUDA requirements...
echo This may take a few minutes as PyTorch CUDA packages are large...

REM Ask the user which CUDA build of PyTorch to install.
echo.
echo Please select your CUDA version:
echo 1. CUDA 11.8
echo 2. CUDA 12.1
echo 3. Auto-detect (default)
set /p cuda_choice=Enter choice (1-3, default 3): 

if "%cuda_choice%"=="1" (
    echo Installing PyTorch for CUDA 11.8...
    pip install torch torchaudio torchvision --extra-index-url https://download.pytorch.org/whl/cu118
) else if "%cuda_choice%"=="2" (
    echo Installing PyTorch for CUDA 12.1...
    pip install torch torchaudio torchvision --extra-index-url https://download.pytorch.org/whl/cu121
) else (
    REM NOTE: ")" must be escaped as ^) here, or it closes this block early.
    echo Installing PyTorch for CUDA 11.8 ^(default^)...
    pip install torch torchaudio torchvision --extra-index-url https://download.pytorch.org/whl/cu118
)

REM Install remaining dependencies. Version specs are quoted because an
REM unquoted ">=" is output redirection in cmd.exe: "numpy>=1.24.0" unquoted
REM would install unpinned numpy and create a stray file named "=1.24.0".
echo [INFO] Installing remaining dependencies...
pip install "numpy>=1.24.0" "tiktoken>=0.5.0" "tqdm>=4.65.0"
pip install "gguf>=0.6.0" "sentencepiece>=0.1.99" "safetensors>=0.4.0" "psutil>=5.8.0"
pip install "pynvml>=11.4.1" "nvidia-ml-py3>=7.352.0"
pip install "matplotlib>=3.7.0" "jupyter>=1.0.0"

REM Test CUDA availability
echo [INFO] Testing CUDA setup...
REM Smoke-test the installed PyTorch build: exit non-zero when CUDA is
REM unavailable so the errorlevel check below can abort the script.
python -c "import torch; print(f'PyTorch version: {torch.__version__}'); print(f'CUDA available: {torch.cuda.is_available()}'); import sys; sys.exit(0 if torch.cuda.is_available() else 1)"
if errorlevel 1 (
    echo [ERROR] CUDA not available in PyTorch installation
    pause
    exit /b 1
)

REM Print device details now that CUDA is known to be usable.
python -c "import torch; print(f'CUDA device count: {torch.cuda.device_count()}'); print(f'Current CUDA device: {torch.cuda.current_device()}'); print(f'CUDA device name: {torch.cuda.get_device_name()}'); print(f'CUDA version: {torch.version.cuda}')"

REM Verify the model file exists next to this script (warn only).
if exist "model_optimized.pt" (
    echo [OK] Model file found: model_optimized.pt
) else (
    echo [WARN] model_optimized.pt not found in current directory
    echo Make sure you have the model file in the same directory as this script
)

echo.
echo [DONE] CUDA setup complete!
echo.
echo Usage Instructions:
echo To activate CUDA environment:
echo venv-cuda\Scripts\activate
echo.
echo To run CUDA inference:
echo python inference_cuda.py --prompt "Your prompt here"
echo.
echo To run CUDA chat:
echo python chat_cuda.py
echo.
echo To run CUDA benchmark:
echo python benchmark_cuda.py
echo.
echo Test your setup:
echo python -c "import torch; print('CUDA available:', torch.cuda.is_available())"
pause