# requirements.txt for a HuggingFace Space
# NOTE(review): the original "Spaces: / Sleeping / Sleeping" lines were page-scrape
# chrome from the Space's status page, not requirements — kept here as a comment.
# PyTorch with CUDA 12.8 (for Windows/Linux)
torch==2.9.1
torchaudio==2.9.1
torchvision==0.24.1
# Core dependencies
transformers>=4.51.0,<4.58.0
diffusers
gradio==6.2.0
matplotlib>=3.7.5
scipy>=1.10.1
soundfile>=0.13.1
ffmpeg-python
loguru>=0.7.3
einops>=0.8.1
accelerate>=1.12.0
fastapi>=0.110.0
diskcache
uvicorn[standard]>=0.27.0
numba>=0.63.1
vector-quantize-pytorch>=1.27.15
# torchcodec>=0.9.1 # Disabled: causes CUDA dependency issues on HuggingFace Space
# LoRA Training dependencies (optional)
peft>=0.7.0
lightning>=2.0.0
# nano-vllm dependencies
triton-windows>=3.0.0,<3.4; sys_platform == 'win32'
triton>=3.0.0; sys_platform != 'win32'
# NOTE(review): this Windows wheel's filename encodes torch 2.7.1 (cu128torch2.7.1),
# but torch==2.9.1 is pinned above — possible ABI mismatch; confirm a wheel built
# against torch 2.9.1 exists before relying on this on win32.
flash-attn @ https://github.com/sdbds/flash-attention-for-windows/releases/download/2.8.2/flash_attn-2.8.2+cu128torch2.7.1cxx11abiFALSEfullbackward-cp311-cp311-win_amd64.whl ; sys_platform == 'win32' and python_version == '3.11' and platform_machine == 'AMD64'
# NOTE(review): this Linux wheel's filename encodes torch 2.10 (cu128torch2.10),
# while torch==2.9.1 is pinned above — verify binary compatibility on the Space.
flash-attn @ https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.7.12/flash_attn-2.8.3+cu128torch2.10-cp311-cp311-linux_x86_64.whl ; sys_platform == 'linux' and python_version == '3.11'
# Kernels library for flash-attn3 (preferred over flash-attn when available)
kernels
xxhash
# HuggingFace Space required
spaces
huggingface_hub>=0.20.0