# Use Node.js 22 (required by OpenClaw) as the base image
FROM node:22-bookworm-slim
# Install system tools, Ollama runtime deps (zstd), and the toolchain needed
# to build native npm modules (python3/make/g++). --no-install-recommends
# keeps the image lean, so ca-certificates must be listed explicitly (it is
# only a Recommends of libcurl4 and HTTPS downloads fail without it).
# The Ollama installer is saved to a file and then executed instead of
# `curl | sh`: /bin/sh (dash) has no pipefail, so a failed download in a
# pipe would be silently masked. apt lists are removed in the same layer
# so they never persist in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
        g++ \
        git \
        jq \
        make \
        procps \
        python3 \
        zstd \
    && curl -fsSL https://ollama.com/install.sh -o /tmp/ollama-install.sh \
    && sh /tmp/ollama-install.sh \
    && rm -f /tmp/ollama-install.sh \
    && rm -rf /var/lib/apt/lists/*
# Create the writable directories for Ollama state, user-scoped global npm
# packages, and the OpenClaw workspace, and hand them to the built-in
# non-root "node" user. One logical step = one RUN: doing this in a single
# layer instead of three avoids two redundant image layers.
RUN mkdir -p \
        /home/node/.ollama \
        /home/node/.npm-global \
        /home/node/.openclaw/workspace \
    && chown -R node:node \
        /home/node/.ollama \
        /home/node/.npm-global \
        /home/node/.openclaw
# Drop root privileges: everything from here on (npm install, startup
# script, runtime) executes as the unprivileged built-in "node" user.
USER node

# HOME for the node user, and the per-user npm bin dir prepended to PATH so
# globally installed CLIs (openclaw) resolve. Grouped into one ENV
# instruction; $PATH still expands to the base image's value, exactly as in
# two separate ENV lines.
ENV HOME=/home/node \
    PATH=/home/node/.npm-global/bin:$PATH
WORKDIR $HOME
# Point npm's global prefix at the user-writable directory (the node user
# cannot write to the default /usr/local prefix), install the OpenClaw CLI,
# and clean the npm cache in the same layer so it is not baked into the
# image (hadolint DL3016).
# NOTE(review): openclaw@latest is unpinned, so builds are not
# reproducible — pin an exact version once one is validated.
RUN npm config set prefix '~/.npm-global' \
    && npm install -g openclaw@latest \
    && npm cache clean --force
# Document the port the OpenClaw gateway listens on (Hugging Face Spaces
# expects 7860). EXPOSE is metadata only — it does not publish the port.
EXPOSE 7860
# Generate /home/node/start.sh at build time with Qwen 3 8B configured.
# How the quoting works: the Dockerfile `\` at the end of each line joins
# the whole single-quoted string into one logical line, and the literal
# `\n` sequences are then expanded into real newlines by the shell's echo
# (/bin/sh is dash, whose echo interprets backslash escapes). The script:
#   1. writes the OpenClaw JSON config via the inner `cat <<EOF` heredoc
#      (the JSON contains no `$`, so the unquoted EOF delimiter is safe),
#   2. starts `ollama serve` in the background and polls `ollama list`
#      until the server answers,
#   3. pulls the qwen3:8b model,
#   4. launches the OpenClaw gateway on port 7860 in the foreground.
# NOTE(review): the auth token below is baked into the image in plain
# text — anyone who can pull the image can read it. Consider injecting it
# at runtime (e.g. a Space secret) instead.
RUN echo '#!/bin/bash\n\
echo "Writing default OpenClaw configuration..."\n\
cat <<EOF > /home/node/.openclaw/openclaw.json\n\
{\n\
"gateway": {\n\
"mode": "local",\n\
"bind": "lan",\n\
"trustedProxies": ["10.0.0.0/8", "127.0.0.1"],\n\
"auth": {\n\
"token": "pelm-my-super-secret-password-123"\n\
},\n\
"controlUi": {\n\
"allowInsecureAuth": true,\n\
"dangerouslyDisableDeviceAuth": true\n\
}\n\
},\n\
"models": {\n\
"mode": "merge",\n\
"providers": {\n\
"ollama": {\n\
"baseUrl": "http://127.0.0.1:11434/v1",\n\
"apiKey": "ollama-local",\n\
"api": "openai-responses",\n\
"models": [\n\
{\n\
"id": "qwen3:8b",\n\
"name": "Qwen 3 (8B)",\n\
"reasoning": false,\n\
"input": ["text"],\n\
"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },\n\
"contextWindow": 40000,\n\
"maxTokens": 8192\n\
}\n\
]\n\
}\n\
}\n\
},\n\
"agents": {\n\
"defaults": {\n\
"model": {\n\
"primary": "ollama/qwen3:8b"\n\
},\n\
"models": {\n\
"ollama/qwen3:8b": {\n\
"streaming": false\n\
}\n\
}\n\
}\n\
}\n\
}\n\
EOF\n\
\n\
echo "Starting Ollama server..."\n\
ollama serve &\n\
\n\
echo "Waiting for Ollama server to be active..."\n\
while ! ollama list > /dev/null 2>&1; do\n\
sleep 1\n\
done\n\
\n\
echo "Ollama is up! Pulling Qwen 3 8B..."\n\
ollama pull qwen3:8b\n\
\n\
echo "Model ready! Starting OpenClaw..."\n\
openclaw gateway --port 7860 --allow-unconfigured\n\
' > start.sh && chmod +x start.sh
# Start the Space via the generated script (exec form; WORKDIR is
# /home/node, so the relative path resolves to /home/node/start.sh).
# Removed a stray trailing "|" extraction artifact that broke the
# JSON-array CMD.
CMD ["./start.sh"]