huanbao committed on
Commit
ea2978f
·
verified ·
1 Parent(s): ac0a4a0

Create sync_data.sh

Browse files
Files changed (1) hide show
  1. sync_data.sh +257 -0
sync_data.sh ADDED
@@ -0,0 +1,257 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/bin/bash
2
+ # ==============================================================================
3
+ # 通用数据库多源备份脚本 (WebDAV + S3/R2/C2)
4
+ # 适配项目: landppt.db
5
+ # ==============================================================================
6
+
7
+ # ----------------- 配置区 (修改了这里) -----------------
8
+ # 数据库所在目录 (根据您的路径 sqlite:///./landppt.db,这里设为当前目录 ".")
9
+ # 如果脚本不在项目根目录运行,请将 DATA_DIR 改为绝对路径,例如 "/app/data"
10
+ DATA_DIR="."
11
+
12
+ # 数据库文件名
13
+ DB_NAME="landppt.db"
14
+
15
+ # 拼接完整路径
16
+ DB_FILE="${DATA_DIR}/${DB_NAME}"
17
+
18
+ # 备份文件的前缀 (区分不同项目的备份)
19
+ BACKUP_PREFIX="landppt_backup_"
20
+
21
+ # 备份间隔与保留数量
22
+ SYNC_INTERVAL="${SYNC_INTERVAL:-600}"
23
+ BACKUP_KEEP="${BACKUP_KEEP:-24}"
24
+
25
+ # 超时设置 (秒)
26
+ TIMEOUT_RESTORE="60"
27
+ TIMEOUT_CMD="120"
28
+
# Timestamped logger: prints "[Backup] YYYY-mm-dd HH:MM:SS <message>" to stdout.
log() {
    printf '[Backup] %s %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$*"
}
# ----------------- Destination availability checks -----------------
# A destination is considered configured when all of its required
# environment variables are non-empty.

# True when every argument is a non-empty string.
_all_set() {
    local v
    for v in "$@"; do
        [[ -n "$v" ]] || return 1
    done
}

has_webdav() { _all_set "$WEBDAV_URL" "$WEBDAV_USERNAME" "$WEBDAV_PASSWORD"; }
has_s3()     { _all_set "$S3_ENDPOINT_URL" "$S3_BUCKET" "$S3_ACCESS_KEY_ID"; }
has_s3_2()   { _all_set "$S3_2_ENDPOINT_URL" "$S3_2_BUCKET" "$S3_2_ACCESS_KEY_ID"; }
# run_with_timeout <seconds> <command> [args...]
# Runs the command, aborting it after <seconds> when the `timeout` utility
# is available; without it the command runs unbounded. Returns the command's
# exit status; on timeout logs an error and returns 124.
run_with_timeout() {
    local t="$1"
    shift
    if ! command -v timeout >/dev/null; then
        # No timeout utility: best effort, propagate the command's status.
        "$@"
        return
    fi
    timeout "$t" "$@"
    local rc=$?
    if [ "$rc" -eq 124 ]; then
        log "错误: 操作超时 ($t秒)"
        return 124
    fi
    return "$rc"
}
# ----------------- Database extraction helper -----------------
# extract_db <tar.gz path>
# Pulls the first archive member whose name ends with $DB_NAME out of the
# tarball and moves it to $DB_FILE. Returns 0 on success, non-zero on any
# failure. All output is suppressed, matching the original best-effort style.
# Values are handed to Python through the environment (not interpolated into
# the source), so paths containing quotes/backslashes cannot break or inject
# code; extraction uses a private mkdtemp dir that is always cleaned up
# instead of a fixed, never-removed /tmp/restore_tmp.
extract_db() {
    local tar_path="$1"
    [ ! -f "$tar_path" ] && return 1
    mkdir -p "$DATA_DIR"
    TAR_PATH="$tar_path" DB_NAME="$DB_NAME" DB_FILE="$DB_FILE" \
    python3 - <<'PY' >/dev/null 2>&1
import os
import shutil
import sys
import tarfile
import tempfile

try:
    with tarfile.open(os.environ['TAR_PATH'], 'r:gz') as tar:
        # Find the member whose name ends with the configured DB file name.
        member = next(
            (m for m in tar.getmembers() if m.name.endswith(os.environ['DB_NAME'])),
            None,
        )
        if member is None:
            sys.exit(1)
        # Flatten any leading directories so the file lands directly in place.
        member.name = os.path.basename(member.name)
        tmp = tempfile.mkdtemp()
        try:
            tar.extract(member, tmp)
            shutil.move(os.path.join(tmp, member.name), os.environ['DB_FILE'])
        finally:
            shutil.rmtree(tmp, ignore_errors=True)
    print('ok')
except Exception:
    sys.exit(1)
PY
}
# ----------------- 1. Latest-backup discovery -----------------
# Each helper prints the newest backup file name (lexicographic sort works
# because names embed a YYYYmmdd_HHMMSS timestamp), or nothing on failure.

# get_s3_latest_name <endpoint> <bucket> <access-key> <secret-key>
# Lists the bucket root and prints the newest matching archive name.
get_s3_latest_name() {
    local ENDPOINT="$1" BUCKET="$2" ACCESS="$3" SECRET="$4"
    export AWS_ACCESS_KEY_ID="$ACCESS"
    export AWS_SECRET_ACCESS_KEY="$SECRET"
    export AWS_DEFAULT_REGION="auto"
    # Filter on both the prefix and the .tar.gz suffix — consistent with the
    # WebDAV listing — so unrelated bucket objects are never selected.
    run_with_timeout 20 aws --endpoint-url "$ENDPOINT" s3 ls "s3://$BUCKET/" 2>/dev/null \
        | awk '{print $4}' \
        | grep "^${BACKUP_PREFIX}.*\.tar\.gz$" \
        | sort | tail -n 1
}
# get_webdav_latest_name
# Lists the WebDAV backup directory and prints the newest
# $BACKUP_PREFIX*.tar.gz name, or nothing on any failure.
# Credentials are handed to Python via the environment instead of being
# interpolated into the source, so special characters in passwords cannot
# break or inject code; the webdav3 import sits inside the try so a missing
# module degrades silently instead of spewing a traceback.
get_webdav_latest_name() {
    WEBDAV_URL="$WEBDAV_URL" WEBDAV_USERNAME="$WEBDAV_USERNAME" \
    WEBDAV_PASSWORD="$WEBDAV_PASSWORD" BACKUP_PREFIX="$BACKUP_PREFIX" \
    python3 - <<'PY'
import os

try:
    from webdav3.client import Client

    hostname = os.environ['WEBDAV_URL']
    sub = os.environ.get('WEBDAV_BACKUP_PATH', '')
    if sub:
        hostname = hostname.rstrip('/') + '/' + sub.strip('/')
    client = Client({
        'webdav_hostname': hostname,
        'webdav_login': os.environ['WEBDAV_USERNAME'],
        'webdav_password': os.environ['WEBDAV_PASSWORD'],
        'webdav_timeout': 15,
    })
    prefix = os.environ['BACKUP_PREFIX']
    names = [f for f in client.list()
             if f.startswith(prefix) and f.endswith('.tar.gz')]
    if names:
        print(sorted(names)[-1])
except Exception:
    # Best effort: print nothing so the caller treats this source as empty.
    pass
PY
}
# ----------------- 2. Download helpers -----------------
# download_s3_file <endpoint> <bucket> <access-key> <secret-key> <file> <dest>
# Copies one object from the bucket to <dest>; exports credentials for the
# AWS CLI and bounds the transfer with $TIMEOUT_RESTORE.
download_s3_file() {
    local ENDPOINT="$1" BUCKET="$2" ACCESS="$3" SECRET="$4" FILE="$5" DL_PATH="$6"
    log "从 S3 下载: $FILE ..."
    export AWS_ACCESS_KEY_ID="$ACCESS" AWS_SECRET_ACCESS_KEY="$SECRET" AWS_DEFAULT_REGION="auto"
    run_with_timeout "$TIMEOUT_RESTORE" \
        aws --endpoint-url "$ENDPOINT" s3 cp "s3://$BUCKET/$FILE" "$DL_PATH" --quiet
}
# download_webdav_file <file> <dest>
# Streams one file from the WebDAV backup directory to <dest>.
# Returns non-zero on any HTTP or I/O failure. Credentials and paths are
# passed to Python through the environment rather than interpolated into the
# source, so special characters cannot break or inject code.
download_webdav_file() {
    local FILE="$1" DL_PATH="$2"
    log "从 WebDAV 下载: $FILE ..."
    WEBDAV_URL="$WEBDAV_URL" WEBDAV_USERNAME="$WEBDAV_USERNAME" \
    WEBDAV_PASSWORD="$WEBDAV_PASSWORD" DL_NAME="$FILE" DL_PATH="$DL_PATH" \
    python3 - <<'PY'
import os
import sys

try:
    import requests

    hostname = os.environ['WEBDAV_URL']
    sub = os.environ.get('WEBDAV_BACKUP_PATH', '')
    if sub:
        hostname = hostname.rstrip('/') + '/' + sub.strip('/')
    url = hostname + '/' + os.environ['DL_NAME']
    auth = (os.environ['WEBDAV_USERNAME'], os.environ['WEBDAV_PASSWORD'])
    with requests.get(url, auth=auth, stream=True, timeout=60) as r:
        r.raise_for_status()
        with open(os.environ['DL_PATH'], 'wb') as f:
            for chunk in r.iter_content(8192):
                f.write(chunk)
except Exception:
    sys.exit(1)
PY
}
# ----------------- Startup restore flow -----------------
# If the local database is missing or empty, ask every configured backup
# source for its newest archive, pick the overall newest candidate
# (file names embed a sortable timestamp), download it and extract the DB.
if [ -f "$DB_FILE" ] && [ -s "$DB_FILE" ]; then
    log "本地数据库 ($DB_NAME) 已存在,跳过恢复。"
else
    log "正在检查所有备份源的最新版本..."
    # Scratch file of "filename SOURCE_TAG" candidate lines.
    # NOTE(review): fixed, predictable /tmp path — consider mktemp.
    CANDIDATES_FILE="/tmp/backup_candidates.txt"
    > "$CANDIDATES_FILE"

    # 1. Primary S3.
    if has_s3; then
        F_S3=$(get_s3_latest_name "$S3_ENDPOINT_URL" "$S3_BUCKET" "$S3_ACCESS_KEY_ID" "$S3_SECRET_ACCESS_KEY")
        if [ -n "$F_S3" ]; then
            echo "$F_S3 S3_MAIN" >> "$CANDIDATES_FILE"
            log "发现 S3(主): $F_S3"
        fi
    fi
    # 2. Secondary S3.
    if has_s3_2; then
        F_S3_2=$(get_s3_latest_name "$S3_2_ENDPOINT_URL" "$S3_2_BUCKET" "$S3_2_ACCESS_KEY_ID" "$S3_2_SECRET_ACCESS_KEY")
        if [ -n "$F_S3_2" ]; then
            echo "$F_S3_2 S3_SEC" >> "$CANDIDATES_FILE"
            log "发现 S3(备): $F_S3_2"
        fi
    fi
    # 3. WebDAV.
    if has_webdav; then
        F_DAV=$(get_webdav_latest_name)
        if [ -n "$F_DAV" ]; then
            echo "$F_DAV WEBDAV" >> "$CANDIDATES_FILE"
            log "发现 WebDAV: $F_DAV"
        fi
    fi

    # 4. Decide: reverse-sort the candidate lines; the first line then holds
    # the lexicographically (= chronologically) newest backup file name.
    BEST_LINE=$(sort -r "$CANDIDATES_FILE" | head -n 1)
    if [ -n "$BEST_LINE" ]; then
        TARGET_FILE=$(echo "$BEST_LINE" | awk '{print $1}')
        SOURCE_TYPE=$(echo "$BEST_LINE" | awk '{print $2}')
        DL_FILE="/tmp/restore.tar.gz"
        rm -f "$DL_FILE"
        log ">>> 决定使用最新备份: $TARGET_FILE (来源: $SOURCE_TYPE)"
        SUCCESS=0
        # Dispatch the download to the source that advertised the winner.
        case "$SOURCE_TYPE" in
            "S3_MAIN")
                download_s3_file "$S3_ENDPOINT_URL" "$S3_BUCKET" "$S3_ACCESS_KEY_ID" "$S3_SECRET_ACCESS_KEY" "$TARGET_FILE" "$DL_FILE"
                ;;
            "S3_SEC")
                download_s3_file "$S3_2_ENDPOINT_URL" "$S3_2_BUCKET" "$S3_2_ACCESS_KEY_ID" "$S3_2_SECRET_ACCESS_KEY" "$TARGET_FILE" "$DL_FILE"
                ;;
            "WEBDAV")
                download_webdav_file "$TARGET_FILE" "$DL_FILE"
                ;;
        esac

        # Only a non-empty download counts; extraction failure leaves
        # SUCCESS=0 and is reported below.
        if [ -f "$DL_FILE" ] && [ -s "$DL_FILE" ]; then
            extract_db "$DL_FILE"
            if [ $? -eq 0 ]; then
                log "恢复成功!"
                SUCCESS=1
                rm -f "$DL_FILE"
            fi
        fi
        if [ $SUCCESS -eq 0 ]; then
            log "错误: 尽管发现了文件,但下载或解压失败。"
        fi
    else
        log "未在任何源中找到备份文件,将启动全新实例。"
    fi
    rm -f "$CANDIDATES_FILE"
fi
# ----------------- Background backup loop -----------------
# Detached subshell: every $SYNC_INTERVAL seconds, pack the database into a
# timestamped tar.gz and push it to every configured destination. Only the
# primary S3 bucket is pruned down to $BACKUP_KEEP archives.
# NOTE(review): WebDAV and the secondary S3 bucket are never pruned, so
# backups accumulate there indefinitely — confirm whether that is intended.
(
    while true; do
        sleep "$SYNC_INTERVAL"
        if [ -f "$DB_FILE" ]; then
            TS=$(date +%Y%m%d_%H%M%S)
            BACKUP_NAME="${BACKUP_PREFIX}${TS}.tar.gz"
            TMP_BAK="/tmp/$BACKUP_NAME"

            # Archive only the configured database file.
            tar -czf "$TMP_BAK" -C "$DATA_DIR" "$DB_NAME" 2>/dev/null

            # Upload to WebDAV.
            # NOTE(review): with an empty WEBDAV_BACKUP_PATH the URL contains
            # a double slash ("...//file") — most servers tolerate it; verify.
            if has_webdav; then
                run_with_timeout "$TIMEOUT_CMD" curl -s -f --connect-timeout 15 \
                    -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" \
                    -T "$TMP_BAK" \
                    "${WEBDAV_URL%/}/${WEBDAV_BACKUP_PATH#/}/$BACKUP_NAME" >/dev/null 2>&1
            fi

            # Upload to primary S3.
            if has_s3; then
                export AWS_ACCESS_KEY_ID="$S3_ACCESS_KEY_ID"
                export AWS_SECRET_ACCESS_KEY="$S3_SECRET_ACCESS_KEY"
                export AWS_DEFAULT_REGION="auto"
                run_with_timeout "$TIMEOUT_CMD" aws --endpoint-url "$S3_ENDPOINT_URL" s3 cp "$TMP_BAK" "s3://$S3_BUCKET/$BACKUP_NAME" --quiet >/dev/null 2>&1

                # Prune primary S3: delete the oldest archives beyond
                # $BACKUP_KEEP (names sort chronologically).
                FILES=$(aws --endpoint-url "$S3_ENDPOINT_URL" s3 ls "s3://$S3_BUCKET/" | awk '{print $4}' | grep "^$BACKUP_PREFIX" | sort)
                COUNT=$(echo "$FILES" | wc -l)
                if [ "$COUNT" -gt "$BACKUP_KEEP" ]; then
                    DEL=$(($COUNT - $BACKUP_KEEP))
                    echo "$FILES" | head -n "$DEL" | while read -r F; do
                        aws --endpoint-url "$S3_ENDPOINT_URL" s3 rm "s3://$S3_BUCKET/$F" --quiet
                    done
                fi
            fi

            # Upload to secondary S3.
            if has_s3_2; then
                export AWS_ACCESS_KEY_ID="$S3_2_ACCESS_KEY_ID"
                export AWS_SECRET_ACCESS_KEY="$S3_2_SECRET_ACCESS_KEY"
                export AWS_DEFAULT_REGION="auto"
                run_with_timeout "$TIMEOUT_CMD" aws --endpoint-url "$S3_2_ENDPOINT_URL" s3 cp "$TMP_BAK" "s3://$S3_2_BUCKET/$BACKUP_NAME" --quiet >/dev/null 2>&1
            fi

            rm -f "$TMP_BAK"
            log "备份完成: $BACKUP_NAME"
        fi
    done
) &