deeme committed on
Commit
9b35d58
·
verified ·
1 Parent(s): 75e6ef5

Upload 3 files

Browse files
Files changed (3) hide show
  1. Dockerfile +56 -0
  2. README.md +3 -4
  3. sync_data.sh +115 -0
Dockerfile ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
FROM ghcr.io/open-webui/open-webui:main

# Install Redis plus the Python toolchain needed by sync_data.sh in ONE layer.
# The original ran two separate `apt-get update && apt-get install` layers;
# merging them, adding --no-install-recommends, and cleaning the apt lists
# keeps the image smaller without changing what is installed.
RUN apt-get update && \
    apt-get install -y --no-install-recommends redis-server python3 python3-pip && \
    rm -rf /var/lib/apt/lists/*

# Fix Redis runtime/data directory ownership so the non-root uid:gid 1000:1000
# user (the open-webui runtime user) can start Redis.
RUN mkdir -p /var/run/redis && \
    chown -R 1000:1000 /var/run/redis && \
    chown -R 1000:1000 /var/lib/redis && \
    chmod 777 /var/run/redis

# Generate the Redis startup snippet. It is spliced into start.sh below (see
# the sed commands at the bottom), so it executes as part of the entrypoint:
# daemonized Redis with persistence disabled, then a quick liveness ping.
RUN echo "#!/bin/bash" > redis-start.sh && \
    echo "redis-server --daemonize yes --save '' --appendonly no" >> redis-start.sh && \
    echo "sleep 2" >> redis-start.sh && \
    echo "echo 'Redis status:'" >> redis-start.sh && \
    echo "redis-cli ping" >> redis-start.sh

ENV TZ=Asia/Shanghai
#ENV UID=1000
#ENV GID=1000
ENV ENABLE_AUTH=True
ENV WEBUI_AUTH=True
ENV ENABLE_SIGNUP=false

ENV DEFAULT_MODELS=gpt-4o-mini
ENV RAG_EMBEDDING_ENGINE=openai
ENV RAG_EMBEDDING_MODEL=text-embedding-3-large
ENV ENABLE_IMAGE_GENERATION=true
ENV IMAGE_GENERATION_ENGINE=openai
ENV ENABLE_OLLAMA_API=false
ENV DEFAULT_LOCALE=cn
ENV ADMIN_EMAIL=admin@168369.xyz
ENV TASK_MODEL_EXTERNAL=gpt-4o-mini
ENV YOUTUBE_LOADER_LANGUAGE=cn
ENV ENABLE_RAG_WEB_SEARCH=true
ENV ENABLE_SEARCH_QUERY=true
ENV RAG_WEB_SEARCH_ENGINE=searxng,duckduckgo
ENV AUDIO_STT_ENGINE=openai
ENV AUDIO_TTS_ENGINE=openai
ENV ENABLE_REALTIME_CHAT_SAVE=false

# Data-sync settings: SYNC_INTERVAL is the backup period in seconds,
# DATASET_ID is the HuggingFace dataset repo holding the backups.
ENV SYNC_INTERVAL=600
ENV DATASET_ID=deeme/ui

# huggingface_hub is required by the hf_sync.py helper embedded in sync_data.sh.
RUN pip3 install --no-cache-dir huggingface_hub

COPY sync_data.sh sync_data.sh

# Open up data/static dirs for the runtime user, then splice both helper
# scripts into start.sh right after its first line. Note the LAST sed inserts
# closest to the top, so redis-start.sh runs before sync_data.sh at boot.
RUN chmod -R 777 ./data && \
    chmod -R 777 /app/backend/open_webui/static && \
    chmod +x sync_data.sh && \
    sed -i "1r sync_data.sh" ./start.sh && \
    sed -i "1r redis-start.sh" ./start.sh
README.md CHANGED
@@ -1,10 +1,9 @@
1
  ---
2
- title: Ui
3
- emoji:
4
  colorFrom: blue
5
  colorTo: pink
6
  sdk: docker
7
  pinned: false
 
8
  ---
9
-
10
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
1
  ---
2
+ title: ui
3
+ emoji: 👁
4
  colorFrom: blue
5
  colorTo: pink
6
  sdk: docker
7
  pinned: false
8
+ app_port: 8080
9
  ---
 
 
sync_data.sh ADDED
@@ -0,0 +1,115 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
#!/bin/sh

# Refuse to run without credentials: HF_TOKEN authenticates against
# HuggingFace and DATASET_ID names the dataset repo that stores backups.
if [ -n "$HF_TOKEN" ] && [ -n "$DATASET_ID" ]; then
    : # both required variables are present — continue
else
    echo "缺少必要的环境变量 HF_TOKEN 或 DATASET_ID"
    exit 1
fi
# Write the HuggingFace sync helper to /tmp. It is invoked below and from the
# periodic loop as: hf_sync.py <upload|download> <token> <repo_id> [src] [dest]
cat > /tmp/hf_sync.py << 'EOL'
from huggingface_hub import HfApi
import shutil
import sys
import os

def manage_backups(api, repo_id, max_files=50):
    """Prune oldest webui_backup_*.db files so at most max_files remain."""
    files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
    backup_files = [f for f in files if f.startswith('webui_backup_') and f.endswith('.db')]
    # Names embed a YYYYmmdd_HHMMSS timestamp, so lexical order is chronological.
    backup_files.sort()

    # Fix: the original pruned whenever len >= max_files and deleted one extra
    # file, leaving only max_files - 1 backups. Keep exactly max_files.
    if len(backup_files) > max_files:
        files_to_delete = backup_files[:len(backup_files) - max_files]
        for file_to_delete in files_to_delete:
            try:
                api.delete_file(path_in_repo=file_to_delete, repo_id=repo_id, repo_type="dataset")
                print(f'已删除旧备份: {file_to_delete}')
            except Exception as e:
                print(f'删除 {file_to_delete} 时出错: {str(e)}')

def upload_backup(file_path, file_name, token, repo_id):
    """Upload one backup file to the dataset repo, then prune old backups."""
    api = HfApi(token=token)
    try:
        api.upload_file(
            path_or_fileobj=file_path,
            path_in_repo=file_name,
            repo_id=repo_id,
            repo_type="dataset"
        )
        print(f"成功上传 {file_name}")

        manage_backups(api, repo_id)
    except Exception as e:
        print(f"文件上传出错: {str(e)}")

def download_latest_backup(token, repo_id):
    """Restore ./data/webui.db from the newest backup in the dataset repo."""
    try:
        api = HfApi(token=token)
        files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
        backup_files = [f for f in files if f.startswith('webui_backup_') and f.endswith('.db')]

        if not backup_files:
            print("未找到备份文件")
            return

        latest_backup = sorted(backup_files)[-1]

        filepath = api.hf_hub_download(
            repo_id=repo_id,
            filename=latest_backup,
            repo_type="dataset"
        )

        if filepath and os.path.exists(filepath):
            os.makedirs('./data', exist_ok=True)
            # Fix: was os.system(f'cp "{filepath}" ./data/webui.db'), which
            # spawns a shell and silently ignores copy failures; shutil.copy
            # stays in-process and raises on error (caught below).
            shutil.copy(filepath, './data/webui.db')
            print(f"成功从 {latest_backup} 恢复备份")

    except Exception as e:
        print(f"下载备份时出错: {str(e)}")

if __name__ == "__main__":
    action = sys.argv[1]
    token = sys.argv[2]
    repo_id = sys.argv[3]

    if action == "upload":
        file_path = sys.argv[4]
        file_name = sys.argv[5]
        upload_backup(file_path, file_name, token, repo_id)
    elif action == "download":
        download_latest_backup(token, repo_id)
EOL
# Restore the most recent backup from HuggingFace on first start.
echo "正在从 HuggingFace 下载最新备份..."
python3 /tmp/hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}"

# Resolve the sync period once, up front (the original re-applied the default
# on every loop iteration). Dockerfile sets SYNC_INTERVAL=600; 7200 s is the
# fallback when it is unset or empty.
SYNC_INTERVAL=${SYNC_INTERVAL:-7200}

# Periodically snapshot ./data/webui.db and push it to the dataset repo.
sync_data() {
    while true; do
        echo "开始同步进程 $(date)"

        if [ -f "./data/webui.db" ]; then
            timestamp=$(date +%Y%m%d_%H%M%S)
            backup_file="webui_backup_${timestamp}.db"

            # Copy to /tmp so the upload reads a fixed file.
            # NOTE(review): cp of a live SQLite database is not
            # transaction-safe; `sqlite3 ... ".backup"` would be — confirm
            # whether torn snapshots are acceptable here.
            cp ./data/webui.db "/tmp/${backup_file}"

            echo "正在上传备份到 HuggingFace..."
            python3 /tmp/hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "/tmp/${backup_file}" "${backup_file}"

            rm -f "/tmp/${backup_file}"
        else
            echo "数据库文件不存在,等待下次同步..."
        fi

        echo "下次同步将在 ${SYNC_INTERVAL} 秒后进行..."
        sleep "$SYNC_INTERVAL"
    done
}

# Run the sync loop in the background so start.sh continues booting open-webui.
sync_data &