NS-Genai committed on
Commit
950bfa6
·
verified ·
1 Parent(s): 2720160

Create Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +95 -0
Dockerfile ADDED
@@ -0,0 +1,95 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Use Node.js 22 (required by OpenClaw) as the base image.
FROM node:22-bookworm-slim

# Install system tools, Ollama dependencies (zstd) and native build tools,
# then drop the apt cache in the same layer so it never lands in the image.
# --no-install-recommends keeps the slim image slim; ca-certificates is
# listed explicitly because curl only "Recommends" it and HTTPS fails
# without it once recommends are disabled.
# NOTE(review): piping a remote install script into sh executes unpinned,
# unaudited code at build time — consider pinning a specific Ollama release.
RUN apt-get update && apt-get install -y --no-install-recommends \
        git curl ca-certificates procps zstd python3 make g++ jq \
    && curl -fsSL https://ollama.com/install.sh | sh \
    && rm -rf /var/lib/apt/lists/*
9
+
10
# Create the data directories and hand ownership to the built-in "node"
# user. A single RUN produces one layer instead of three, with the exact
# same resulting filesystem state.
RUN mkdir -p /home/node/.ollama \
             /home/node/.npm-global \
             /home/node/.openclaw/workspace \
    && chown -R node:node /home/node/.ollama \
                          /home/node/.npm-global \
                          /home/node/.openclaw
14
+
15
# Drop root: every later build step and the runtime process run as the
# unprivileged "node" user that ships with the official Node images.
USER node

# One ENV layer: the home directory, plus the user-local npm bin dir on
# PATH so globally installed CLIs (openclaw) resolve without sudo.
ENV HOME=/home/node \
    PATH=/home/node/.npm-global/bin:$PATH

# Work out of the user's home directory from here on.
WORKDIR $HOME
20
+
21
# Point npm's global prefix at the user-writable directory created above.
# Use the absolute path: npm stores the config value verbatim, so a quoted
# literal '~' would rely on npm's own tilde handling instead of the shell's,
# and must match the /home/node/.npm-global/bin entry already on PATH.
# NOTE(review): "@latest" makes builds non-reproducible — pin a known-good
# openclaw version once established.
RUN npm config set prefix /home/node/.npm-global \
    && npm install -g openclaw@latest

# Expose the web port Hugging Face Spaces routes traffic to.
EXPOSE 7860
27
+
28
# Startup script, written via a quoted BuildKit heredoc (Docker >= 23, or
# add "# syntax=docker/dockerfile:1" at the top of the file). The original
# single RUN echo '...\n\...' one-liner only worked because dash's echo
# happens to interpret backslash escapes — a heredoc writes the file
# verbatim with no shell-dependent escape processing.
COPY --chown=node:node <<'SCRIPT' /home/node/start.sh
#!/bin/bash
# Container entrypoint: write the OpenClaw config, start Ollama, pull the
# model, then hand the process over to the OpenClaw gateway.
set -euo pipefail

echo "Writing default OpenClaw configuration..."
# Quoted delimiter: the JSON below is written literally, no expansion.
# NOTE(review): the gateway auth token is hardcoded below — inject it via
# a runtime secret/env var instead of baking it into the image.
cat <<'EOF' > /home/node/.openclaw/openclaw.json
{
  "gateway": {
    "mode": "local",
    "bind": "lan",
    "trustedProxies": ["10.0.0.0/8", "127.0.0.1"],
    "auth": {
      "token": "pelm-my-super-secret-password-123"
    },
    "controlUi": {
      "allowInsecureAuth": true,
      "dangerouslyDisableDeviceAuth": true
    }
  },
  "models": {
    "mode": "merge",
    "providers": {
      "ollama": {
        "baseUrl": "http://127.0.0.1:11434/v1",
        "apiKey": "ollama-local",
        "api": "openai-responses",
        "models": [
          {
            "id": "qwen2.5-coder:14b",
            "name": "Qwen 2.5 Coder 14B",
            "reasoning": false,
            "input": ["text"],
            "cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
            "contextWindow": 32000,
            "maxTokens": 8192
          }
        ]
      }
    }
  },
  "agents": {
    "defaults": {
      "model": {
        "primary": "ollama/qwen2.5-coder:14b"
      }
    }
  }
}
EOF

echo "Wiping stale device identities to prevent 1008 mismatch errors..."
rm -rf /home/node/.openclaw/devices /home/node/.openclaw/identity

echo "Starting Ollama server..."
ollama serve &

echo "Waiting for Ollama server to be active..."
# Bounded wait: fail loudly after ~120s instead of hanging forever.
for _ in $(seq 1 120); do
  if ollama list > /dev/null 2>&1; then
    break
  fi
  sleep 1
done
ollama list > /dev/null 2>&1 || { echo "Ollama did not start" >&2; exit 1; }

echo "Ollama is up! Pulling Qwen 2.5 Coder 14B..."
ollama pull qwen2.5-coder:14b

echo "Model ready! Starting OpenClaw..."
# exec replaces the shell so OpenClaw receives container stop signals.
exec openclaw gateway --port 7860 --allow-unconfigured
SCRIPT
RUN chmod +x /home/node/start.sh
93
+
94
# Start the Space. Absolute path so the command keeps working even if the
# WORKDIR set earlier in the file ever changes.
CMD ["/home/node/start.sh"]