# (Hugging Face page-extraction residue, not Dockerfile content: "Spaces: Paused")
# Use Node.js 22 (required by OpenClaw) as the base image
FROM node:22-bookworm-slim

# Use bash with pipefail so a failed download in `curl … | sh` fails the layer
# instead of being masked by the pipe (hadolint DL4006).
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Install system tools, Ollama dependencies (zstd), and native build tools
# (python3/make/g++ are needed for node-gyp native modules).
# --no-install-recommends keeps the image lean; the apt list cleanup happens
# in the same layer so the cache never persists in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
        g++ \
        git \
        jq \
        make \
        procps \
        python3 \
        zstd \
    && curl -fsSL https://ollama.com/install.sh | sh \
    && rm -rf /var/lib/apt/lists/*
# Create data/config directories and hand them to the built-in "node" user.
# One logical setup step == one RUN layer (three separate RUNs would create
# three layers for no cache benefit).
RUN mkdir -p \
        /home/node/.ollama \
        /home/node/.npm-global \
        /home/node/.openclaw/workspace \
    && chown -R node:node \
        /home/node/.ollama \
        /home/node/.npm-global \
        /home/node/.openclaw
# Switch to the non-root "node" user for all remaining build steps and at runtime
USER node

# Runtime environment: HOME for the node user, and the user-writable npm
# global prefix on PATH so globally installed CLIs (openclaw) resolve.
ENV HOME=/home/node \
    PATH=/home/node/.npm-global/bin:$PATH

# Absolute path (== $HOME); WORKDIR should not rely on variable expansion (DL3000)
WORKDIR /home/node
# Point npm's global prefix at the user-owned directory created above.
# Use the absolute path: npm stores `~` literally in .npmrc and does not
# reliably expand it, which can silently break global installs.
# NOTE(review): `openclaw@latest` is unpinned — builds are not reproducible;
# pin a version (e.g. openclaw@X.Y.Z) once one is blessed.
RUN npm config set prefix /home/node/.npm-global \
    && npm install -g openclaw@latest \
    && npm cache clean --force

# Document the Hugging Face web port (EXPOSE is metadata only; it does not publish)
EXPOSE 7860
# Write the startup script with a BuildKit heredoc instead of a giant
# `echo '…\n\…'` chain: the echo form only works because dash's builtin echo
# interprets backslash escapes, and it is unreadable/undiffable.
# NOTE(review): the gateway auth token below is baked into the image — anyone
# who can pull the image can read it. Prefer injecting it at container start
# via an environment variable or the platform's secret mechanism.
COPY --chown=node:node --chmod=0755 <<'SCRIPT_EOF' /home/node/start.sh
#!/bin/bash
set -euo pipefail

echo "Writing default OpenClaw configuration..."
# Quoted delimiter: the JSON is written verbatim, no shell expansion.
cat <<'EOF' > /home/node/.openclaw/openclaw.json
{
  "gateway": {
    "mode": "local",
    "bind": "lan",
    "trustedProxies": ["10.0.0.0/8", "127.0.0.1"],
    "auth": {
      "token": "pelm-my-super-secret-password-123"
    },
    "controlUi": {
      "allowInsecureAuth": true,
      "dangerouslyDisableDeviceAuth": true
    }
  },
  "models": {
    "mode": "merge",
    "providers": {
      "ollama": {
        "baseUrl": "http://127.0.0.1:11434/v1",
        "apiKey": "ollama-local",
        "api": "openai-responses",
        "models": [
          {
            "id": "qwen2.5-coder:14b",
            "name": "Qwen 2.5 Coder 14B",
            "reasoning": false,
            "input": ["text"],
            "cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
            "contextWindow": 32000,
            "maxTokens": 8192
          }
        ]
      }
    }
  },
  "agents": {
    "defaults": {
      "model": {
        "primary": "ollama/qwen2.5-coder:14b"
      }
    }
  }
}
EOF

echo "Wiping stale device identities to prevent 1008 mismatch errors..."
rm -rf /home/node/.openclaw/devices /home/node/.openclaw/identity

echo "Starting Ollama server..."
ollama serve &

echo "Waiting for Ollama server to be active..."
while ! ollama list > /dev/null 2>&1; do
  sleep 1
done

echo "Ollama is up! Pulling Qwen 2.5 Coder 14B..."
ollama pull qwen2.5-coder:14b

echo "Model ready! Starting OpenClaw..."
# exec so OpenClaw replaces the shell as PID 1 and receives SIGTERM on stop
exec openclaw gateway --port 7860 --allow-unconfigured
SCRIPT_EOF

# Start the Space (exec-form CMD; ./start.sh resolves against WORKDIR /home/node)
CMD ["./start.sh"]