Tea78 committed on
Commit
fc0eb94
·
verified ·
1 Parent(s): fb4ce02

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +109 -27
Dockerfile CHANGED
@@ -1,42 +1,124 @@
 
1
  FROM node:22-slim
2
- RUN apt-get update && apt-get install -y curl git
3
- RUN npm install -g openclaw@latest
4
 
5
- # 创建强制使用Nvidia的配置文件
6
- RUN mkdir -p /root/.openclaw && cat > /root/.openclaw/openclaw.json << 'EOF'
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7
  {
8
- "gateway": {
9
- "mode": "local",
10
- "auth": {
11
- "mode": "token",
12
- "token": "$OPENCLAW_GATEWAY_TOKEN"
13
- }
14
- },
15
  "models": {
16
  "providers": {
17
  "nvidia": {
18
- "baseUrl": "https://build.nvidia.com/v1",
19
- "apiKey": "$OPENAI_API_KEY",
20
  "api": "openai-completions",
21
- "models": [
22
- {
23
- "id": "$MODEL",
24
- "name": "Kimi K2.5",
25
- "contextWindow": 256000
26
- }
27
- ]
28
  }
29
  }
30
  },
31
- "agents": {
32
- "defaults": {
33
- "model": {
34
- "primary": "nvidia/$MODEL"
35
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
  }
37
  }
38
  }
39
  EOF
40
 
41
- EXPOSE 3000
42
- CMD ["openclaw", "gateway", "run", "--port", "3000", "--verbose"]
 
 
 
 
 
 
 
 
1
+ # 核心镜像选择
2
  FROM node:22-slim
 
 
3
 
4
+ # 1. 基础依赖补全
5
+ RUN apt-get update && apt-get install -y --no-install-recommends \
6
+ git openssh-client build-essential python3 python3-pip \
7
+ g++ make ca-certificates \
8
+ && rm -rf /var/lib/apt/lists/*
9
+
10
+ # 2. 安装 HF 数据交互工具
11
+ RUN pip3 install --no-cache-dir huggingface_hub --break-system-packages
12
+
13
+ # 3. 构建环境与 Git 协议优化
14
+ RUN update-ca-certificates && \
15
+ git config --global http.sslVerify false && \
16
+ git config --global url."https://github.com/".insteadOf ssh://git@github.com/
17
+
18
+ # 4. OpenClaw 核心安装
19
+ RUN npm install -g openclaw@latest --unsafe-perm
20
+
21
+ # 5. 环境变量预设
22
+ ENV PORT=7860 \
23
+ OPENCLAW_GATEWAY_MODE=local \
24
+ HOME=/root
25
+
26
+ # 6. Python 同步引擎 (sync.py)
27
+ RUN echo 'import os, sys, tarfile
28
+ from huggingface_hub import HfApi, hf_hub_download
29
+ from datetime import datetime, timedelta
30
+ api = HfApi()
31
+ repo_id = os.getenv("HF_DATASET")
32
+ token = os.getenv("HF_TOKEN")
33
+ def restore():
34
+ try:
35
+ files = api.list_repo_files(repo_id=repo_id, repo_type="dataset", token=token)
36
+ now = datetime.now()
37
+ for i in range(5):
38
+ day = (now - timedelta(days=i)).strftime("%Y-%m-%d")
39
+ name = f"backup_{day}.tar.gz"
40
+ if name in files:
41
+ path = hf_hub_download(repo_id=repo_id, filename=name, repo_type="dataset", token=token)
42
+ with tarfile.open(path, "r:gz") as tar: tar.extractall(path="/root/.openclaw/")
43
+ print(f"Success: Restored from {name}")
44
+ return True
45
+ except Exception as e: print(f"Restore Error: {e}")
46
+ def backup():
47
+ try:
48
+ day = datetime.now().strftime("%Y-%m-%d")
49
+ name = f"backup_{day}.tar.gz"
50
+ with tarfile.open(name, "w:gz") as tar:
51
+ if os.path.exists("/root/.openclaw/sessions"): tar.add("/root/.openclaw/sessions", arcname="sessions")
52
+ tar.add("/root/.openclaw/openclaw.json", arcname="openclaw.json")
53
+ api.upload_file(path_or_fileobj=name, path_in_repo=name, repo_id=repo_id, repo_type="dataset", token=token)
54
+ print(f"Backup {name} Success.")
55
+ except Exception as e: print(f"Backup Error: {e}")
56
+ if __name__ == "__main__":
57
+ if len(sys.argv) > 1 and sys.argv[1] == "backup": backup()
58
+ else: restore()' > /usr/local/bin/sync.py
59
+
60
+ # 7. 启动控制逻辑(NVIDIA 配置版)
61
+ RUN echo '#!/bin/bash
62
+ set -e
63
+ mkdir -p /root/.openclaw/sessions
64
+
65
+ # 阶段 3: 执行启动前恢复
66
+ python3 /usr/local/bin/sync.py restore
67
+
68
+ # 处理地址逻辑
69
+ CLEAN_BASE=$(echo "$OPENAI_API_BASE" | sed "s|/chat/completions||g" | sed "s|/v1/|/v1|g" | sed "s|/v1$|/v1|g")
70
+
71
+ # 阶段 2: 生成网关与模型配置(NVIDIA 版)
72
+ cat > /root/.openclaw/openclaw.json <<EOF
73
  {
 
 
 
 
 
 
 
74
  "models": {
75
  "providers": {
76
  "nvidia": {
77
+ "baseUrl": "$CLEAN_BASE",
78
+ "apiKey": "$OPENAI_API_KEY",
79
  "api": "openai-completions",
80
+ "models": [{
81
+ "id": "$MODEL",
82
+ "name": "Kimi K2.5",
83
+ "contextWindow": 256000
84
+ }]
 
 
85
  }
86
  }
87
  },
88
+ "agents": {
89
+ "defaults": {
90
+ "model": {
91
+ "primary": "nvidia/$MODEL"
92
+ }
93
+ }
94
+ },
95
+ "gateway": {
96
+ "mode": "local",
97
+ "bind": "lan",
98
+ "port": $PORT,
99
+ "trustedProxies": ["0.0.0.0/0", "10.0.0.0/8", "172.16.0.0/12", "192.168.0.0/16"],
100
+ "auth": {
101
+ "mode": "token",
102
+ "token": "$OPENCLAW_GATEWAY_TOKEN"
103
+ },
104
+ "remote": {
105
+ "token": "$OPENCLAW_GATEWAY_TOKEN"
106
+ },
107
+ "controlUi": {
108
+ "allowInsecureAuth": true,
109
+ "dangerouslyAllowHostHeaderOriginFallback": true,
110
+ "dangerouslyDisableDeviceAuth": true
111
  }
112
  }
113
  }
114
  EOF
115
 
116
+ # 增量备份循环 (每 6 小时)
117
+ (while true; do sleep 21600; python3 /usr/local/bin/sync.py backup; done) &
118
+
119
+ openclaw doctor --fix
120
+ exec openclaw gateway run --port $PORT
121
+ ' > /usr/local/bin/start-openclaw && chmod +x /usr/local/bin/start-openclaw
122
+
123
+ EXPOSE 7860
124
+ CMD ["/usr/local/bin/start-openclaw"]