# =============================================================================
# Fragmenta — Hugging Face Spaces Dockerfile (CPU + GPU)
# =============================================================================
# Deploy as a Docker Space on Hugging Face.
# Works on both free CPU Spaces and paid GPU Spaces.
#
# CPU Spaces: torch.cuda.is_available() → False → runs on CPU
# GPU Spaces: NVIDIA runtime injected by HF → torch.cuda.is_available() → True
#
# Port: 7860 (HF Spaces requirement)
# =============================================================================
# ---------------------------------------------------------------------------
# Stage 1: Build the React frontend
# ---------------------------------------------------------------------------
FROM node:20-slim AS frontend-builder
WORKDIR /build/frontend

# Copy manifests first so the dependency layer stays cached until they change.
# package-lock.json may be absent (hence the glob), so `npm ci` gets an
# `npm install` fallback. stderr is intentionally NOT suppressed: hiding
# `npm ci` errors made genuine failures (corrupt lockfile, registry outage)
# indistinguishable from the expected missing-lockfile case.
COPY app/frontend/package.json app/frontend/package-lock.json* ./
RUN npm ci --prefer-offline --no-audit || npm install

# Source changes only invalidate layers from here on.
COPY app/frontend/ ./
RUN npm run build
# ---------------------------------------------------------------------------
# Stage 2: Python backend (CPU + GPU via bundled CUDA in PyTorch wheels)
# ---------------------------------------------------------------------------
FROM python:3.11-slim-bookworm

# Build-time only: ARG (not ENV) so noninteractive mode does not leak into
# the runtime environment of the final image.
ARG DEBIAN_FRONTEND=noninteractive

# System deps (audio processing, networking), alphabetically sorted for
# diffability; recommends skipped and apt lists removed in the same layer.
RUN apt-get update && apt-get install -y --no-install-recommends \
        curl \
        ffmpeg \
        git \
        libsndfile1 \
    && apt-get clean && rm -rf /var/lib/apt/lists/*

RUN pip install --no-cache-dir --root-user-action=ignore --upgrade pip setuptools wheel

WORKDIR /app
# ---------------------------------------------------------------------------
# Python dependencies
# ---------------------------------------------------------------------------
COPY requirements.txt .
# Install PyTorch with CUDA support (default wheels bundle CUDA runtime).
# On GPU Spaces the NVIDIA driver is injected by the host so CUDA works.
# On CPU Spaces it gracefully falls back to CPU — no extra cost.
# NOTE(review): versions are unpinned, so builds are not reproducible across
# PyTorch releases — consider pinning once a known-good set is validated.
RUN pip install --no-cache-dir --root-user-action=ignore torch torchvision torchaudio
# Install remaining requirements (skip desktop-only & CUDA index lines).
# `grep -ivE` drops, case-insensitively, any line mentioning PyQt6 (desktop
# GUI), flash-attn (requires a CUDA build toolchain) or extra-index-url
# (would point pip at a CUDA-specific wheel index). The filtered file is
# removed in the same layer so it never persists in the image.
# NOTE(review): if requirements.txt pins torch itself, this install may
# re-resolve/downgrade the torch installed above — confirm against the file.
RUN grep -ivE 'pyqt6|pyqt6-webengine|flash-attn|extra-index-url' requirements.txt > requirements_docker.txt \
    && pip install --no-cache-dir --root-user-action=ignore -r requirements_docker.txt \
    && rm requirements_docker.txt
# ---------------------------------------------------------------------------
# Application code
# ---------------------------------------------------------------------------
# Copy owned by uid/gid 1000 (the HF Spaces "user" account, created later in
# the build) so no recursive chown of the whole tree is required afterwards —
# a `chown -R /app` following the model download would duplicate every file
# it touches, including multi-GB weights, into an additional image layer.
COPY --chown=1000:1000 . .
COPY --chown=1000:1000 --from=frontend-builder /build/frontend/build ./app/frontend/build

# Install stable-audio-tools in-tree (editable install into system
# site-packages; read-only at runtime, so root ownership there is fine).
RUN pip install --no-cache-dir --root-user-action=ignore -e ./stable-audio-tools/

# Create the directories the app writes to at runtime.
RUN mkdir -p /app/models/pretrained \
             /app/models/fine_tuned \
             /app/models/config \
             /app/data \
             /app/output \
             /app/logs \
             /app/config
# ---------------------------------------------------------------------------
# Download pretrained model weights from HF model repo
# (Keeps the Space repo under the 1 GB limit)
# ---------------------------------------------------------------------------
# Use the huggingface-cli bundled with huggingface_hub instead of an inline
# `python -c "…"` spanning continuation lines: backslash-newlines *inside* a
# double-quoted -c string depend on fragile interaction between the
# Dockerfile parser's escape handling and shell quoting. The CLI accepts
# multiple filenames and a target directory directly.
RUN pip install --no-cache-dir --root-user-action=ignore huggingface_hub \
    && HF_HUB_DISABLE_TELEMETRY=1 huggingface-cli download MazCodes/fragmenta-models \
        stable-audio-open-small-model.safetensors \
        stable-audio-open-model.safetensors \
        --local-dir /app/models/pretrained \
    && echo "Models downloaded successfully"
# Save default model configs (restored at runtime if dirs are empty).
# The `ls` glob test already fails when the directory itself is missing, so a
# separate `[ -d … ]` check is redundant (the dir is also created
# unconditionally by the earlier mkdir -p); `cp` only runs when at least one
# config file actually exists, so an empty dir cannot fail the build.
RUN mkdir -p /opt/fragmenta-defaults/models/config \
    && if ls /app/models/config/*.json >/dev/null 2>&1; then \
           cp /app/models/config/*.json /opt/fragmenta-defaults/models/config/; \
       fi
# ---------------------------------------------------------------------------
# HF Spaces runs as user "user" (uid 1000) — ensure write permissions
# ---------------------------------------------------------------------------
COPY docker-entrypoint.sh /docker-entrypoint.sh
RUN chmod +x /docker-entrypoint.sh

# `|| true`: tolerate the account already existing (rebuilds, derived images).
RUN useradd -m -u 1000 user || true

# Chown only the runtime-writable directories, NOT all of /app: a recursive
# chown over the whole tree would copy every file it touches — including the
# multi-GB pretrained weights, which only need to be readable — into a new
# image layer, roughly doubling the image size.
# NOTE(review): /app/models/pretrained is deliberately left root-owned
# (read-only weights). Confirm the app never writes there at runtime.
RUN chown -R 1000:1000 /opt/fragmenta-defaults \
        /app/models/fine_tuned \
        /app/models/config \
        /app/data \
        /app/output \
        /app/logs \
        /app/config

USER user
# ---------------------------------------------------------------------------
# Configuration — HF Spaces requires port 7860
# ---------------------------------------------------------------------------
# Single ENV instruction: same resulting image config as six separate ones,
# grouped for readability.
ENV FLASK_HOST=0.0.0.0 \
    FLASK_PORT=7860 \
    FRAGMENTA_LOG_LEVEL=INFO \
    FRAGMENTA_DOCKER=1 \
    HOME=/home/user \
    PATH="/home/user/.local/bin:${PATH}"

# Documentation only — HF Spaces routes traffic to 7860 regardless.
EXPOSE 7860

# Exec form: entrypoint script runs as PID 1 and receives SIGTERM directly.
ENTRYPOINT ["/docker-entrypoint.sh"]