HuggingFace0920 committed
Commit 9465d93 · verified · Parent(s): 9728c75

Update Dockerfile

Files changed (1)
  1. Dockerfile +156 -20
Dockerfile CHANGED
@@ -1,28 +1,164 @@
- FROM ghcr.io/open-webui/open-webui:main
-
- # Install the required dependencies
- RUN apt-get update && apt-get install -y python3 python3-pip
-
- # Install the huggingface_hub library
- RUN pip3 install --no-cache-dir huggingface_hub
-
- # Set the required environment variables (adjust as needed)
- ENV HF_HOME="/app/data/huggingface"
- ENV HUGGINGFACE_HUB_CACHE="/app/data/huggingface"
-
- # Copy the data sync script
- COPY sync_data.sh /app/
-
- # Set permissions
- RUN chmod -R 777 /app/data && \
-     chmod -R 777 /app/open_webui && \
-     chmod +x /app/sync_data.sh
-
- # Modify the start script so the sync script runs at startup
- RUN sed -i "1r /app/sync_data.sh" /app/start.sh
-
- # Expose the port (adjust to the application's needs)
- EXPOSE 3000
-
- # Start the application
- CMD ["/app/start.sh"]
+ #!/bin/sh
+
+ # Check the required environment variables
+ if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
+     echo "HF_TOKEN or DATASET_ID not set; backup feature is unavailable"
+     # Exit 0 here so the container still starts without backup support
+     exit 0
+ fi
+
+ # Activate the virtual environment (if present)
+ if [ -d "$HOME/venv" ]; then
+     echo "Activating virtual environment..."
+     . "$HOME/venv/bin/activate"
+ else
+     echo "No virtual environment found, using system Python..."
+ fi
+
+ # Generate the sync helper script
+ cat > /app/hf_sync.py << 'EOL'
+ # HuggingFace sync helper
+ from huggingface_hub import HfApi
+ import sys
+ import os
+ import tarfile
+ import tempfile
+
+ # Cap the number of backup files: once the count reaches max_files,
+ # delete the oldest backups
+ def manage_backups(api, repo_id, max_files=50):
+     files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
+     backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
+     backup_files.sort()
+     if len(backup_files) >= max_files:
+         files_to_delete = backup_files[:(len(backup_files) - max_files + 1)]
+         for file_to_delete in files_to_delete:
+             try:
+                 api.delete_file(path_in_repo=file_to_delete, repo_id=repo_id, repo_type="dataset")
+                 print(f'Deleted old backup: {file_to_delete}')
+             except Exception as e:
+                 print(f'Error deleting {file_to_delete}: {str(e)}')
+
+ # Upload a backup file to HuggingFace
+ def upload_backup(file_path, file_name, token, repo_id):
+     api = HfApi(token=token)
+     try:
+         api.upload_file(
+             path_or_fileobj=file_path,
+             path_in_repo=file_name,
+             repo_id=repo_id,
+             repo_type="dataset"
+         )
+         print(f"Successfully uploaded {file_name}")
+         manage_backups(api, repo_id)
+     except Exception as e:
+         print(f"Error uploading file: {str(e)}")
+
+ # Download the latest backup
+ def download_latest_backup(token, repo_id, extract_path):
+     try:
+         api = HfApi(token=token)
+         files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
+         backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
+         if not backup_files:
+             print("No backup files found")
+             return
+         latest_backup = sorted(backup_files)[-1]
+         with tempfile.TemporaryDirectory() as temp_dir:
+             filepath = api.hf_hub_download(
+                 repo_id=repo_id,
+                 filename=latest_backup,
+                 repo_type="dataset",
+                 local_dir=temp_dir
+             )
+             if filepath and os.path.exists(filepath):
+                 with tarfile.open(filepath, 'r:gz') as tar:
+                     tar.extractall(extract_path)
+                 print(f"Successfully restored backup: {latest_backup}")
+     except Exception as e:
+         print(f"Error downloading backup: {str(e)}")
+
+ # Squash the repo's commit history
+ def super_squash_history(token, repo_id):
+     try:
+         api = HfApi(token=token)
+         api.super_squash_history(repo_id=repo_id, repo_type="dataset")
+         print("History squash complete.")
+     except Exception as e:
+         print(f"Error squashing history: {str(e)}")
+
+ # Main entry point
+ if __name__ == "__main__":
+     action = sys.argv[1]
+     token = sys.argv[2]
+     repo_id = sys.argv[3]
+     if action == "upload":
+         file_path = sys.argv[4]
+         file_name = sys.argv[5]
+         upload_backup(file_path, file_name, token, repo_id)
+     elif action == "download":
+         extract_path = sys.argv[4] if len(sys.argv) > 4 else '.'
+         download_latest_backup(token, repo_id, extract_path)
+     elif action == "super_squash":
+         super_squash_history(token, repo_id)
+ EOL
+
+ # On first start, restore the latest backup from HuggingFace (extracted into the app data directory)
+ echo "Downloading the latest backup from HuggingFace..."
+ python3 /app/hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}" "/app/data"
+
+ # Sync loop
+ sync_data() {
+     while true; do
+         echo "Sync run started at $(date)"
+
+         # Paths for the data directory and database file
+         DATA_DIR="/app/data"
+         DB_FILE="${DATA_DIR}/webui.db"
+
+         if [ -f "${DB_FILE}" ]; then
+             # Create a backup
+             timestamp=$(date +%Y%m%d_%H%M%S)
+             backup_file="backup_${timestamp}.tar.gz"
+
+             # Compress the database
+             tar -czf "/tmp/${backup_file}" -C "${DATA_DIR}" "webui.db"
+
+             # Upload to HuggingFace
+             echo "Uploading backup to HuggingFace..."
+             python3 /app/hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "/tmp/${backup_file}" "${backup_file}"
+
+             # Periodically squash commit history
+             SQUASH_FLAG_FILE="/tmp/last_squash_time"
+             NOW=$(date +%s)
+             SEVEN_DAYS=$((7*24*60*60))
+             if [ ! -f "$SQUASH_FLAG_FILE" ]; then
+                 echo $NOW > "$SQUASH_FLAG_FILE"
+                 echo "First history squash..."
+                 python3 /app/hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
+             else
+                 LAST=$(cat "$SQUASH_FLAG_FILE")
+                 DIFF=$((NOW - LAST))
+                 if [ $DIFF -ge $SEVEN_DAYS ]; then
+                     echo $NOW > "$SQUASH_FLAG_FILE"
+                     echo "More than 7 days since the last squash; squashing commit history..."
+                     python3 /app/hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
+                 else
+                     echo "Less than 7 days since the last squash; skipping this round."
+                 fi
+             fi
+
+             # Clean up the temporary file
+             rm -f "/tmp/${backup_file}"
+         else
+             echo "Database file ${DB_FILE} does not exist, waiting..."
+         fi
+
+         # Sync interval
+         SYNC_INTERVAL=${SYNC_INTERVAL:-7200}
+         echo "Next sync in ${SYNC_INTERVAL} seconds..."
+         sleep $SYNC_INTERVAL
+     done
+ }
+
+ # Start the sync process (runs in the background)
+ sync_data &