chb2026 committed on
Commit
77b525c
·
verified ·
1 Parent(s): ad7ef88

Create sync_data.sh

Browse files
Files changed (1) hide show
  1. sync_data.sh +115 -0
sync_data.sh ADDED
@@ -0,0 +1,115 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
#!/bin/sh

# Backup requires both a Hugging Face token and a target dataset repo.
# If either is missing, honor the message below: start the application
# WITHOUT backup support instead of refusing to start at all (the
# original `exit 1` contradicted its own log line).
if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
  echo "Starting without backup functionality - missing HF_TOKEN or DATASET_ID"
  exec node server/server.js
fi
# Activate the Python virtual environment (provides huggingface_hub for
# hf_sync.py). Guard the source so a missing venv produces a clear
# warning instead of a cryptic "file not found" from the dot command.
if [ -f /home/app/venv/bin/activate ]; then
  . /home/app/venv/bin/activate
else
  echo "Warning: virtualenv not found at /home/app/venv - using system python"
fi
# Generate the helper script that talks to the Hugging Face Hub.
# The quoted 'EOL' delimiter prevents the shell from expanding anything
# inside the Python source.
cat > /home/app/uptime-kuma/hf_sync.py << 'EOL'
"""Upload/download tar.gz backups to a Hugging Face dataset repository."""
from huggingface_hub import HfApi
import sys
import os
import tarfile
import tempfile

def upload_backup(file_path, file_name, token, repo_id):
    """Upload a single backup archive to the dataset repo.

    Errors are reported but not raised, so a transient failure does not
    kill the caller's sync loop; the next cycle retries.
    """
    api = HfApi(token=token)
    try:
        api.upload_file(
            path_or_fileobj=file_path,
            path_in_repo=file_name,
            repo_id=repo_id,
            repo_type="dataset"
        )
        print(f"Successfully uploaded {file_name}")
    except Exception as e:
        print(f"Error uploading file: {str(e)}")

def download_latest_backup(token, repo_id):
    """Fetch the newest backup_*.tar.gz and extract it into the app dir."""
    try:
        api = HfApi(token=token)
        files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
        backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]

        if not backup_files:
            print("No backup files found")
            return

        # Timestamped names (backup_YYYYmmdd_HHMMSS.tar.gz) sort
        # lexicographically, so the last entry is the most recent one.
        latest_backup = sorted(backup_files)[-1]

        with tempfile.TemporaryDirectory() as temp_dir:
            filepath = api.hf_hub_download(
                repo_id=repo_id,
                filename=latest_backup,
                repo_type="dataset",
                local_dir=temp_dir
            )

            if filepath and os.path.exists(filepath):
                with tarfile.open(filepath, 'r:gz') as tar:
                    # Extracting a remotely-fetched archive is a path
                    # traversal risk; use the safe 'data' filter where
                    # available (Python 3.12+, backported to 3.8.17+).
                    try:
                        tar.extractall('/home/app/uptime-kuma/', filter='data')
                    except TypeError:
                        # Older Python without the filter= parameter.
                        tar.extractall('/home/app/uptime-kuma/')
                print(f"Successfully restored backup from {latest_backup}")

    except Exception as e:
        print(f"Error downloading backup: {str(e)}")

if __name__ == "__main__":
    # Validate argv before indexing to avoid an IndexError traceback.
    if len(sys.argv) < 4:
        print("Usage: hf_sync.py upload|download TOKEN REPO_ID [FILE_PATH FILE_NAME]")
        sys.exit(1)
    action = sys.argv[1]
    token = sys.argv[2]
    repo_id = sys.argv[3]

    if action == "upload":
        if len(sys.argv) < 6:
            print("Usage: hf_sync.py upload TOKEN REPO_ID FILE_PATH FILE_NAME")
            sys.exit(1)
        file_path = sys.argv[4]
        file_name = sys.argv[5]
        upload_backup(file_path, file_name, token, repo_id)
    elif action == "download":
        download_latest_backup(token, repo_id)
EOL
# Restore the most recent backup (if any) before the app first starts.
# Use the absolute script path: the entrypoint's working directory is
# not guaranteed to be /home/app/uptime-kuma at this point.
echo "Downloading latest backup from HuggingFace..."
python /home/app/uptime-kuma/hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}"
#######################################
# Periodically archive the data directory and upload it to the HF
# dataset repo. Runs forever; intended to be launched in the background.
# Globals: HF_TOKEN, DATASET_ID (read), SYNC_INTERVAL (read, default 7200s)
#######################################
sync_data() {
  while true; do
    echo "Starting sync process at $(date)"

    if [ -d "/home/app/uptime-kuma/data" ]; then
      # The tar below uses a relative path, so the cd must succeed first.
      if cd /home/app/uptime-kuma; then
        timestamp=$(date +%Y%m%d_%H%M%S)
        backup_file="backup_${timestamp}.tar.gz"

        # Only upload when the archive was actually created.
        if tar -czf "/tmp/${backup_file}" data/; then
          echo "Uploading backup to HuggingFace..."
          python /home/app/uptime-kuma/hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "/tmp/${backup_file}" "${backup_file}"
        else
          echo "Failed to create backup archive, skipping upload"
        fi

        # Remove the local archive regardless of upload outcome.
        rm -f "/tmp/${backup_file}"
      else
        echo "Cannot cd to /home/app/uptime-kuma, skipping this sync"
      fi
    else
      echo "Data directory does not exist yet, waiting for next sync..."
    fi

    # Default: sync every 2 hours; override with SYNC_INTERVAL (seconds).
    SYNC_INTERVAL=${SYNC_INTERVAL:-7200}
    echo "Next sync in ${SYNC_INTERVAL} seconds..."
    sleep "${SYNC_INTERVAL}"
  done
}
# Launch the periodic backup loop in the background; it survives as a
# child of the node process below.
sync_data &

# Hand the process over to the main application. Run from the app
# directory so the relative server path resolves regardless of the
# cwd the entrypoint was invoked from (the dir must exist — hf_sync.py
# was written into it above).
cd /home/app/uptime-kuma || exit 1
exec node server/server.js