File size: 7,497 Bytes
a468774
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
e8e52b0
017229a
5fb9486
a468774
acdb3cc
a468774
00c8c83
93b3d58
f7a5e22
00c8c83
a468774
 
 
4bb01e4
a468774
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
787077b
 
a468774
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
083084e
 
a468774
 
083084e
 
 
 
 
 
 
 
2a96d75
 
 
5007416
a468774
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ce99dce
a468774
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
FROM node:22-slim

# 1. System dependencies: fetch/archive tools, Python 3, and the shared
#    libraries needed by a headless Chromium-based browser (agent-browser).
#    --no-install-recommends keeps the layer minimal (hadolint DL3015);
#    ca-certificates is listed explicitly since recommends are suppressed.
#    apt lists are removed in the same layer so they never persist in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    bzip2 \
    ca-certificates \
    curl \
    git \
    libasound2 \
    libatk-bridge2.0-0 \
    libatk1.0-0 \
    libcairo2 \
    libcups2 \
    libdrm2 \
    libgbm1 \
    libglib2.0-0 \
    libgtk-3-0 \
    libnspr4 \
    libnss3 \
    libpango-1.0-0 \
    libxcomposite1 \
    libxdamage1 \
    libxkbcommon0 \
    libxrandr2 \
    python3 \
    python3-pip \
    rsync \
    tar \
    unzip \
    xz-utils \
    zip \
    && rm -rf /var/lib/apt/lists/*

# Build-time override for the Telegram Bot API endpoint (e.g. a self-hosted
# bot-api server). Also exported to the runtime env for diagnostics.
# NOTE(review): ARG/ENV values are visible in `docker history` — this should
# be a hostname, never a secret.
ARG TELEGRAM_BOT_APIBASE
ENV TELEGRAM_BOT_APIBASE=${TELEGRAM_BOT_APIBASE}

# 2. Install OpenClaw and browser tools globally.
#    NOTE(review): versions are unpinned (@latest) — builds are not
#    reproducible; consider pinning exact versions.
RUN npm install -g openclaw@latest agent-browser pdfnano @google/gemini-cli

# Force-patch the Telegram API host inside the installed package.
# Guarded: with an empty TELEGRAM_BOT_APIBASE the original unconditional sed
# replaced "api.telegram.org" with the empty string and broke the client.
RUN echo "TELEGRAM_BOT_APIBASE=$TELEGRAM_BOT_APIBASE" \
    && if [ -n "$TELEGRAM_BOT_APIBASE" ]; then \
         find /usr/local/lib/node_modules/openclaw -type f -name "*.js" \
           -exec sed -i "s|api.telegram.org|${TELEGRAM_BOT_APIBASE}|g" {} + ; \
       fi

# 3. Hugging Face Hub client (used by the startup script for dataset
#    restore/backup). Invoked via the interpreter so the matching Python 3
#    installation is always targeted.
RUN python3 -m pip install --no-cache-dir --break-system-packages huggingface_hub

# 4. Pre-create the OpenClaw state/workspace layout.
RUN mkdir -p \
      /root/.openclaw/agents/main/agent \
      /root/.openclaw/agents/main/sessions \
      /root/.openclaw/credentials \
      /root/.openclaw/workspace

# Runtime configuration: the gateway listens on 7860 in local mode.
# (EXPOSE is documentation only; the port must still be published at run time.)
ENV PORT=7860 \
    OPENCLAW_GATEWAY_MODE=local
EXPOSE 7860

# 6. Startup script: restore state from a HF dataset, generate the OpenClaw
#    config on first boot, run a periodic background backup loop, then exec
#    the gateway as PID 1. Outer heredoc is quoted ('EOF') so nothing is
#    expanded at build time — the script is written verbatim.
RUN cat > /usr/local/bin/start-openclaw << 'EOF'
#!/bin/bash
set -e

OPENCLAW_DIR="/root/.openclaw"
HF_TMP="/tmp/hf-restore"

# DNS
#echo "nameserver 1.1.1.1" > /etc/resolv.conf 2>/dev/null || true
#echo "nameserver 8.8.8.8" >> /etc/resolv.conf 2>/dev/null || true

# --- RESTORE FROM HF DATASET ---
# Both HF_DATASET_TOKEN and HF_DATASET must be set, else restore is skipped.
if [[ -n "$HF_DATASET_TOKEN" && -n "$HF_DATASET" ]]; then
  echo "[HF] Restoring dataset: $HF_DATASET"

  rm -rf "$HF_TMP"
  mkdir -p "$HF_TMP"

  python3 << 'EOP'
import os
from huggingface_hub import snapshot_download

# Download only the data/ subtree of the dataset snapshot.
snapshot_download(
    repo_id=os.environ["HF_DATASET"],
    repo_type="dataset",
    local_dir="/tmp/hf-restore",
    allow_patterns=["data/**"],
    token=os.environ["HF_DATASET_TOKEN"]
)
print("[HF] Download complete")
EOP

  # CRITICAL FIX: merge data/* into live OpenClaw dir
  # (rsync -a overlays the restored tree; existing files are overwritten)
  if [ -d "$HF_TMP/data" ]; then
    rsync -a "$HF_TMP/data/" "$OPENCLAW_DIR/"
    echo "[HF] Data merged into OpenClaw workspace"
  fi
fi

# --- GENERATE CONFIG ONLY IF MISSING ---
# A restored backup may already contain openclaw.json; never clobber it.
if [ ! -f "$OPENCLAW_DIR/openclaw.json" ]; then
  echo "[CFG] Generating initial openclaw.json"

  # GEMINI_APIKEY drives the primary model; fail fast if it is absent.
  if [ -z "$GEMINI_APIKEY" ]; then
    echo "ERROR: GEMINI_APIKEY not set"
    exit 1
  fi

# Unquoted EOC: the ${...} variables below are expanded NOW, so the written
# JSON embeds the actual API keys and tokens in plain text.
cat > "$OPENCLAW_DIR/openclaw.json" << EOC
{
  "env": {
    "GOOGLE_API_KEY": "${GEMINI_APIKEY}",
    "GEMINI_API_KEY": "${GEMINI_APIKEY}",
    "GOOGLE_GENERATIVE_AI_API_KEY": "${GEMINI_APIKEY}",
    "TELEGRAM_BOT_TOKEN": "${TELEGRAM_BOT_TOKEN}"
  },
 "gateway": {
    "port": 7860,
    "mode": "local",
    "bind": "lan",
    "controlUi": {
      "allowedOrigins": [
        "http://localhost:7860",
        "http://127.0.0.1:7860",
        "https://amangs-cbot.hf.space"
      ],
      "allowInsecureAuth": true,
      "dangerouslyDisableDeviceAuth": true
      },
    "auth": {
      "mode": "token"
      }
    },
  "models": {
    "providers": {
      "nvidia": {
        "baseUrl": "https://integrate.api.nvidia.com/v1",
        "apiKey": "${NVIDIA_NIM_API_KEY}",
        "api": "openai-completions",
        "models": [
          { "id": "deepseek-ai/deepseek-r1-distill-qwen-7b", "name": "Deepseek R1 Distill Qwen 7b", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "deepseek-ai/deepseek-v3.2", "name": "Deepseek v3.2", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "openai/gpt-oss-120b", "name": "GPT OSS 120b", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "openai/gpt-oss-20b", "name": "GPT OSS 20b", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "qwen/qwen2.5-7b-instruct", "name": "Qwen 2.5 7B Instruct", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "qwen/qwen2.5-coder-32b-instruct", "name": "Qwen 2.5 Coder 32B", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "qwen/qwen2.5-coder-7b-instruct", "name": "Qwen 2.5 Coder 7B", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "mistralai/mistral-large-2-instruct", "name": "Mistral Large 2 Instruct", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "databricks/dbrx-instruct", "name": "DBRX Instruct", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "mistralai/mistral-7b-instruct-v0.3", "name": "Mistral 7B Instruct v0.3", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "minimaxai/minimax-m2.1", "name": "MiniMax M2.1", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "z-ai/glm4.7", "name": "GLM 4.7", "contextWindow": 200000, "maxTokens": 8192 }
        ]
      }
    }
  },
  "agents": {
    "defaults": {
      "model": {
        "primary": "google/gemini-3-flash-preview",
        "fallbacks": [
          "google/gemini-3-pro-preview",
          "google/gemini-2.0-flash",
          "google/gemini-2.5-flash",
          "google/gemini-2.5-pro",
          "google/gemini-3-flash-preview"
        ]
      },
      "models": {
        "google/gemini-3-pro-preview": {},
        "google/gemini-2.0-flash": {},
        "google/gemini-2.5-flash": {},
        "google/gemini-2.5-pro": {},
        "google/gemini-3-flash-preview": {},
        "nvidia/deepseek-ai/deepseek-r1-distill-qwen-7b": {},
        "nvidia/deepseek-ai/deepseek-v3.2": {},
        "nvidia/openai/gpt-oss-120b": {},
        "nvidia/openai/gpt-oss-20b": {},
        "nvidia/qwen/qwen2.5-7b-instruct": {},
        "nvidia/qwen/qwen2.5-coder-32b-instruct": {},
        "nvidia/qwen/qwen2.5-coder-7b-instruct": {},
        "nvidia/mistralai/mistral-large-2-instruct": {},
        "nvidia/mistralai/mistral-7b-instruct-v0.3": {},
        "nvidia/databricks/dbrx-instruct": {},
        "nvidia/minimaxai/minimax-m2.1": {},
        "nvidia/z-ai/glm4.7": {}
      },
      "workspace": "/root/.openclaw/workspace",
      "compaction": { "mode": "safeguard" },
      "maxConcurrent": 4,
      "subagents": { "maxConcurrent": 8 }
    }
  },
  "commands": {
    "native": "auto",
    "nativeSkills": "auto",
    "restart": true
  },
  "plugins": {
    "entries": {
      "telegram": {
        "enabled": true
      },
      "whatsapp": {
        "enabled": false
      },
      "discord": {
        "enabled": false
      },
      "qwen-portal-auth": {
        "enabled": false
      }
    }
  },
"channels": {
    "telegram": {
      "enabled": true,
      "allowFrom": ["${TELEGRAM_USER}"]
    }
  }
}
EOC
fi

# The state dir holds tokens/keys — restrict access to owner only.
chmod -R 700 "$OPENCLAW_DIR"

# --- BACKUP LOOP (EVERY 30 MINUTES) ---
# sleep 1800 = 30 min (the previous header comment said "5 minutes", which
# did not match the code). Runs in a background subshell for the container's
# lifetime; it is not waited on and dies with the main process.
if [[ -n "$HF_DATASET_TOKEN" && -n "$HF_DATASET" ]]; then
  (
    while true; do
      sleep 1800
      echo "[HF] Backup running"

      python3 << 'EOP'
import os
from huggingface_hub import HfApi

api = HfApi(token=os.environ["HF_DATASET_TOKEN"])
api.upload_folder(
    repo_id=os.environ["HF_DATASET"],
    repo_type="dataset",
    folder_path="/root/.openclaw",
    path_in_repo="data",
    # NOTE(review): this excludes "openclawd.json" (with a 'd'), but the
    # config generated above is "openclaw.json" and embeds API keys/tokens.
    # If the intent was to keep secrets out of the dataset, this looks like
    # a typo — verify before relying on it.
    ignore_patterns=[
    "openclawd.json",
    "agents/**/sessions/**",
    "workspace/node_modules/**",
    "browser/**",
    "workspace/.cache/**",
    "*.key",
    ".env"
  ]
)
print("[HF] Backup complete")
EOP
    done
  ) &
fi

# Replace the shell so the gateway runs as PID 1 and receives stop signals.
exec openclaw gateway run --port 7860
EOF

# Make the startup script executable; exec-form CMD keeps it as PID 1
# so it receives SIGTERM from `docker stop`.
RUN chmod +x /usr/local/bin/start-openclaw
CMD ["/usr/local/bin/start-openclaw"]