# --- Provenance (copied from HF Spaces UI; kept as comments so this file parses) ---
# Space status: Running
# Last commit: copilot-swe-agent[bot]
#   "Fix Ollama v0.17.5 Docker build: download .tar.zst archive with zstd extraction"
# commit 439ff1f
# Moltbot Hybrid Engine - Multi-service Dockerfile
# Runs: FastAPI (port 7860) + Ollama (optional) + OpenClaw/Clawdbot gateway (port 18789)
# Build: 2026-02-14 v7.0 — Add Clawdbot (OpenClaw) for autonomous agent in HF Space
# FIX v6: Dual LLM backend - Ollama (if available) + HF Inference API fallback
FROM python:3.11-slim
# System packages for HF Spaces Dev Mode plus our build/runtime needs
# (zstd unpacks the Ollama release archive; file verifies the binary).
# List is alphabetized; apt cache is removed in the same layer.
RUN apt-get update && apt-get install -y --no-install-recommends \
        bash \
        ca-certificates \
        curl \
        file \
        git \
        git-lfs \
        procps \
        wget \
        zstd \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
# Install Node.js 22 (required for OpenClaw/Clawdbot).
# Download the NodeSource setup script to a file instead of `curl | bash -`:
# in a pipe under /bin/sh there is no pipefail, so a failed download would be
# masked (bash exits 0 on empty stdin) and the build would continue broken.
RUN curl -fsSL https://deb.nodesource.com/setup_22.x -o /tmp/nodesource_setup.sh \
    && bash /tmp/nodesource_setup.sh \
    && rm -f /tmp/nodesource_setup.sh \
    && apt-get install -y nodejs \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* \
    && node -v \
    && npm -v
# Install OpenClaw (Clawdbot) globally so the gateway can run in the Space.
# NOTE(review): openclaw@latest is unpinned — pin a version for reproducible
# builds once a known-good release is chosen.
# The `command -v … || true` check is informational only (deliberately
# best-effort; the install itself still fails the build on error).
# `npm cache clean` runs in the same layer so the cache never bloats the image.
RUN npm install -g openclaw@latest \
    && npm cache clean --force \
    && (command -v openclaw || true)
# Install Ollama AS ROOT - pinned version, force amd64.
# REQUIRED - build fails if the download fails or the binary is corrupt.
# Download, extract, clean up, and verify in ONE layer so the archive never
# persists in the image. Verification uses explicitly grouped commands:
# the original `A || (…exit 1) && B` relied on the shell's equal-precedence
# left-to-right parsing of ||/&& , which is fragile and easy to misread.
RUN echo "Downloading Ollama v0.17.5 (amd64)..." \
    && curl -fSL --retry 3 --retry-delay 10 --max-time 300 \
        -o /tmp/ollama-linux-amd64.tar.zst \
        "https://github.com/ollama/ollama/releases/download/v0.17.5/ollama-linux-amd64.tar.zst" \
    && tar -I zstd -xf /tmp/ollama-linux-amd64.tar.zst -C /usr/local \
    && rm -f /tmp/ollama-linux-amd64.tar.zst \
    && chmod +x /usr/local/bin/ollama \
    && echo "Ollama binary: $(file /usr/local/bin/ollama)" \
    && echo "Size: $(stat -c%s /usr/local/bin/ollama) bytes" \
    && { file /usr/local/bin/ollama | grep -q "ELF" \
         || { echo "FATAL: Ollama binary is not ELF"; exit 1; }; } \
    && { test "$(stat -c%s /usr/local/bin/ollama)" -gt 1000000 \
         || { echo "FATAL: Ollama binary too small"; exit 1; }; } \
    && echo "OK: Ollama verified"
# HF Spaces requires a non-root user with uid 1000; create it together with
# a writable directory for Ollama model storage (one logical step, one layer).
RUN useradd -m -u 1000 user \
    && mkdir -p /home/user/ollama_models \
    && chown -R user:user /home/user/ollama_models
# Drop privileges: everything from here on runs as uid 1000 (HF requirement).
USER user

# Runtime environment: user-local bin on PATH, Ollama storage/bind address.
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:/usr/local/bin:$PATH \
    OLLAMA_MODELS=/home/user/ollama_models \
    OLLAMA_HOST=0.0.0.0
# Set working directory to /app (required for dev mode)
WORKDIR /app

# Upgrade pip
RUN pip install --no-cache-dir --upgrade pip

# Cache-friendly ordering: copy ONLY the dependency manifest and install
# first, so editing application source does not invalidate the (slow)
# pip-install layer. The full tree is copied afterwards.
COPY --chown=user requirements.txt /app/requirements.txt
# Install Python dependencies (includes huggingface_hub for Inference API)
RUN pip install --no-cache-dir -r requirements.txt

# Copy all application files with correct ownership
COPY --chown=user . /app

# OpenClaw/Clawdbot: minimal config so gateway starts without interactive onboarding
RUN mkdir -p /home/user/.openclaw/workspace
COPY --chown=user openclaw.json /home/user/.openclaw/openclaw.json

# Make start script executable
RUN chmod +x start.sh
# Ports: 7860 = FastAPI (the one HF Spaces routes externally),
# 18789 = OpenClaw gateway (internal only). EXPOSE is documentation; it does
# not publish anything by itself.
EXPOSE 7860 18789

# Exec-form CMD (not ENTRYPOINT) so HF Spaces dev mode can override the command.
CMD ["./start.sh"]