huanbao committed on
Commit
a3f3b7f
·
verified ·
1 Parent(s): ea89910

Update sync_data.sh

Browse files
Files changed (1) hide show
  1. sync_data.sh +73 -70
sync_data.sh CHANGED
@@ -1,17 +1,19 @@
1
  #!/bin/bash
2
  # ==============================================================================
3
- # landppt 多源备份脚本 (完整逻辑)
4
- # 修复: 竞态条件(数据覆盖问题) + WAL模式支持 + Curl重构
5
  # ==============================================================================
6
 
7
  DATA_DIR="."
8
  DB_FILE="landppt.db"
 
 
 
9
  SYNC_INTERVAL="${SYNC_INTERVAL:-600}"
10
  BACKUP_KEEP="${BACKUP_KEEP:-24}"
11
  TIMEOUT_RESTORE="120"
12
  TIMEOUT_CMD="180"
13
 
14
- # S3 区域设置
15
  S3_REGION="${S3_REGION:-auto}"
16
  S3_2_REGION="${S3_2_REGION:-auto}"
17
 
@@ -27,12 +29,11 @@ run_with_timeout() {
27
  fi
28
  }
29
 
30
- # ----------------- 检查函数 -----------------
31
  has_webdav() { [[ -n "$WEBDAV_URL" && -n "$WEBDAV_USERNAME" && -n "$WEBDAV_PASSWORD" ]]; }
32
  has_s3() { [[ -n "$S3_ENDPOINT_URL" && -n "$S3_BUCKET" && -n "$S3_ACCESS_KEY_ID" ]]; }
33
  has_s3_2() { [[ -n "$S3_2_ENDPOINT_URL" && -n "$S3_2_BUCKET" && -n "$S3_2_ACCESS_KEY_ID" ]]; }
34
 
35
- # ----------------- WebDAV 工具函数 -----------------
36
  get_webdav_url() {
37
  local file="$1"
38
  local base="${WEBDAV_URL%/}"
@@ -60,13 +61,11 @@ download_webdav_file() {
60
  -o "$dl_path" "$(get_webdav_url "$file")"
61
  }
62
 
63
- # ----------------- S3 工具函数 -----------------
64
  get_s3_latest_name() {
65
  local ENDPOINT="$1" BUCKET="$2" ACCESS="$3" SECRET="$4" REGION="$5"
66
  export AWS_ACCESS_KEY_ID="$ACCESS"
67
  export AWS_SECRET_ACCESS_KEY="$SECRET"
68
  export AWS_DEFAULT_REGION="$REGION"
69
-
70
  run_with_timeout 30 aws --endpoint-url "$ENDPOINT" --region "$REGION" s3 ls "s3://$BUCKET/" 2>/dev/null \
71
  | awk '{print $4}' | grep 'landppt_backup_.*\.tar\.gz$' | sort | tail -n 1
72
  }
@@ -77,7 +76,6 @@ download_s3_file() {
77
  export AWS_ACCESS_KEY_ID="$ACCESS"
78
  export AWS_SECRET_ACCESS_KEY="$SECRET"
79
  export AWS_DEFAULT_REGION="$REGION"
80
-
81
  rm -f "$DL_PATH"
82
  if run_with_timeout "$TIMEOUT_RESTORE" aws --endpoint-url "$ENDPOINT" --region "$REGION" s3 cp "s3://$BUCKET/$FILE" "$DL_PATH" --quiet; then
83
  [ -s "$DL_PATH" ] && return 0
@@ -85,23 +83,23 @@ download_s3_file() {
85
  return 1
86
  }
87
 
88
- # ----------------- 解压工具 (修复数据不全问题) -----------------
89
- extract_db() {
 
90
  local tar_path="$1"
91
  mkdir -p "$DATA_DIR"
92
 
93
  # 尝试解压
94
  if tar -xzf "$tar_path" -C "$DATA_DIR" 2>/dev/null; then
95
- # 检查解压结果 (包含 .db, .db-shm, .db-wal)
96
  if ls "$DATA_DIR"/landppt.db* 1> /dev/null 2>&1; then
97
  return 0
98
  fi
99
 
100
- # 兼容旧逻辑:如果解压后在子目录或名字不对,尝试查找移动
101
  local found=$(find "$DATA_DIR" -name "landppt.db" -type f | head -n 1)
102
  if [ -n "$found" ] && [ "$found" != "$DATA_DIR/$DB_FILE" ]; then
103
  mv "$found" "$DATA_DIR/$DB_FILE"
104
- # 尝试移动关联文件
105
  [ -f "${found}-shm" ] && mv "${found}-shm" "$DATA_DIR/${DB_FILE}-shm"
106
  [ -f "${found}-wal" ] && mv "${found}-wal" "$DATA_DIR/${DB_FILE}-wal"
107
  return 0
@@ -111,37 +109,30 @@ extract_db() {
111
  }
112
 
113
  # ==============================================================================
114
- # 阶段 1: 恢复流程 (前台运行,阻塞应用启动)
115
  # ==============================================================================
116
 
117
- log ">>> 开始初始化检查 (同步模式)..."
118
 
119
  if [ -f "$DATA_DIR/$DB_FILE" ] && [ -s "$DATA_DIR/$DB_FILE" ]; then
120
  log "本地数据库已存在,跳过恢复。"
121
  else
122
- log "检查所有备份源..."
123
  CANDIDATES_FILE="/tmp/backup_candidates.txt"
124
  > "$CANDIDATES_FILE"
125
 
126
- # 1. 检查 S3 (主)
127
  if has_s3; then
128
- F_S3=$(get_s3_latest_name "$S3_ENDPOINT_URL" "$S3_BUCKET" "$S3_ACCESS_KEY_ID" "$S3_SECRET_ACCESS_KEY" "$S3_REGION")
129
- [ -n "$F_S3" ] && echo "$F_S3 S3_MAIN" >> "$CANDIDATES_FILE" && log "发现 S3(主): $F_S3"
130
  fi
131
-
132
- # 2. 检查 S3 (备)
133
  if has_s3_2; then
134
- F_S3_2=$(get_s3_latest_name "$S3_2_ENDPOINT_URL" "$S3_2_BUCKET" "$S3_2_ACCESS_KEY_ID" "$S3_2_SECRET_ACCESS_KEY" "$S3_2_REGION")
135
- [ -n "$F_S3_2" ] && echo "$F_S3_2 S3_SEC" >> "$CANDIDATES_FILE" && log "发现 S3(备): $F_S3_2"
136
  fi
137
-
138
- # 3. ���查 WebDAV
139
  if has_webdav; then
140
- F_DAV=$(get_webdav_latest_name)
141
- [ -n "$F_DAV" ] && echo "$F_DAV WEBDAV" >> "$CANDIDATES_FILE" && log "发现 WebDAV: $F_DAV"
142
  fi
143
 
144
- # 决策
145
  BEST_LINE=$(sort -r "$CANDIDATES_FILE" | head -n 1)
146
 
147
  if [ -n "$BEST_LINE" ]; then
@@ -149,25 +140,19 @@ else
149
  SOURCE_TYPE=$(echo "$BEST_LINE" | awk '{print $2}')
150
  DL_FILE="/tmp/restore.tar.gz"
151
 
152
- log ">>> 使用最新备份: $TARGET_FILE (来源: $SOURCE_TYPE)"
153
  SUCCESS=0
154
 
155
  case "$SOURCE_TYPE" in
156
- "S3_MAIN")
157
- download_s3_file "$S3_ENDPOINT_URL" "$S3_BUCKET" "$S3_ACCESS_KEY_ID" "$S3_SECRET_ACCESS_KEY" "$S3_REGION" "$TARGET_FILE" "$DL_FILE" && SUCCESS=1
158
- ;;
159
- "S3_SEC")
160
- download_s3_file "$S3_2_ENDPOINT_URL" "$S3_2_BUCKET" "$S3_2_ACCESS_KEY_ID" "$S3_2_SECRET_ACCESS_KEY" "$S3_2_REGION" "$TARGET_FILE" "$DL_FILE" && SUCCESS=1
161
- ;;
162
- "WEBDAV")
163
- download_webdav_file "$TARGET_FILE" "$DL_FILE" && SUCCESS=1
164
- ;;
165
  esac
166
 
167
- if [ $SUCCESS -eq 1 ] && extract_db "$DL_FILE"; then
168
- log "✅ 恢复成功!数据已就绪。"
169
  else
170
- log "❌ 恢复失败: 下载或解压出错"
171
  fi
172
  rm -f "$DL_FILE"
173
  else
@@ -176,61 +161,84 @@ else
176
  rm -f "$CANDIDATES_FILE"
177
  fi
178
 
179
- log ">>> 初始化结束,允许应用启动"
180
 
181
  # ==============================================================================
182
- # 阶段 2: 后台备份循环
183
  # ==============================================================================
184
  (
185
- # 延时 60s 启动备份循环,避免与应用启动争抢资源
186
  sleep 60
187
-
188
  while true; do
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
189
  if [ -f "$DATA_DIR/$DB_FILE" ]; then
190
  TS=$(date +%Y%m%d_%H%M%S)
191
  BACKUP_NAME="landppt_backup_${TS}.tar.gz"
192
  TMP_BAK="/tmp/$BACKUP_NAME"
193
 
194
- # --- 打包 (支持 WAL 文件) ---
195
- # 关键修复: 备份所有 landppt.db* 文件 (包括 -wal 和 -shm)
196
- # 使用 cd 避免路径前缀问题
197
- (cd "$DATA_DIR" && tar -czf "$TMP_BAK" landppt.db* 2>/dev/null)
 
 
 
 
 
 
 
 
198
 
199
- # --- 1. WebDAV 备份 + 清理 ---
200
  if has_webdav; then
201
  UPLOAD_URL=$(get_webdav_url "$BACKUP_NAME")
202
  if run_with_timeout "$TIMEOUT_CMD" curl -s -f --connect-timeout 15 \
203
  -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" \
204
  -T "$TMP_BAK" "$UPLOAD_URL" >/dev/null 2>&1; then
205
 
206
- # WebDAV 清理逻辑
207
  LIST_URL=$(get_webdav_url "")
208
  ALL_FILES=$(curl -s -X PROPFIND -H "Depth: 1" -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" --connect-timeout 15 "$LIST_URL" \
209
  | grep -o 'landppt_backup_[0-9_]*\.tar\.gz' | sort -u | sort)
210
-
211
  COUNT=$(echo "$ALL_FILES" | grep -c .)
212
  if [ "$COUNT" -gt "$BACKUP_KEEP" ]; then
213
  DEL_COUNT=$(($COUNT - $BACKUP_KEEP))
214
  echo "$ALL_FILES" | head -n "$DEL_COUNT" | while read -r F; do
215
- if [ -n "$F" ]; then
216
- DEL_URL=$(get_webdav_url "$F")
217
- curl -s -X DELETE -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" "$DEL_URL" >/dev/null 2>&1
218
- fi
219
  done
220
  fi
221
- else
222
- log "WebDAV 上传失败"
223
  fi
224
  fi
225
 
226
- # --- 2. S3 (主) 备份 + 清理 ---
227
  if has_s3; then
228
  export AWS_ACCESS_KEY_ID="$S3_ACCESS_KEY_ID"
229
  export AWS_SECRET_ACCESS_KEY="$S3_SECRET_ACCESS_KEY"
230
  export AWS_DEFAULT_REGION="$S3_REGION"
231
 
232
  if run_with_timeout "$TIMEOUT_CMD" aws --endpoint-url "$S3_ENDPOINT_URL" --region "$S3_REGION" s3 cp "$TMP_BAK" "s3://$S3_BUCKET/$BACKUP_NAME" --quiet >/dev/null 2>&1; then
233
- # S3 清理逻辑
234
  FILES=$(aws --endpoint-url "$S3_ENDPOINT_URL" --region "$S3_REGION" s3 ls "s3://$S3_BUCKET/" 2>/dev/null | awk '{print $4}' | grep 'landppt_backup_' | sort)
235
  COUNT=$(echo "$FILES" | grep -c .)
236
  if [ "$COUNT" -gt "$BACKUP_KEEP" ]; then
@@ -239,19 +247,16 @@ log ">>> 初始化结束,允许应用启动。"
239
  [ -n "$F" ] && aws --endpoint-url "$S3_ENDPOINT_URL" --region "$S3_REGION" s3 rm "s3://$S3_BUCKET/$F" --quiet
240
  done
241
  fi
242
- else
243
- log "S3(主) 上传失败"
244
  fi
245
  fi
246
 
247
- # --- 3. S3 (备) 备份 + 清理 (可选) ---
248
  if has_s3_2; then
249
  export AWS_ACCESS_KEY_ID="$S3_2_ACCESS_KEY_ID"
250
  export AWS_SECRET_ACCESS_KEY="$S3_2_SECRET_ACCESS_KEY"
251
  export AWS_DEFAULT_REGION="$S3_2_REGION"
252
-
253
- if run_with_timeout "$TIMEOUT_CMD" aws --endpoint-url "$S3_2_ENDPOINT_URL" --region "$S3_2_REGION" s3 cp "$TMP_BAK" "s3://$S3_2_BUCKET/$BACKUP_NAME" --quiet >/dev/null 2>&1; then
254
- # S3 清理逻辑
255
  FILES=$(aws --endpoint-url "$S3_2_ENDPOINT_URL" --region "$S3_2_REGION" s3 ls "s3://$S3_2_BUCKET/" 2>/dev/null | awk '{print $4}' | grep 'landppt_backup_' | sort)
256
  COUNT=$(echo "$FILES" | grep -c .)
257
  if [ "$COUNT" -gt "$BACKUP_KEEP" ]; then
@@ -266,12 +271,10 @@ log ">>> 初始化结束,允许应用启动。"
266
  fi
267
 
268
  rm -f "$TMP_BAK"
269
- log "备份周期完成: $BACKUP_NAME"
270
  fi
271
-
272
  sleep "$SYNC_INTERVAL"
273
  done
274
  ) &
275
 
276
- # 退出脚本,允许 Docker 继续执行 CMD 启动应用
277
- exit 0
 
1
  #!/bin/bash
2
  # ==============================================================================
3
+ # landppt 完整诊断
4
+ # 功能: 1. 完整备份/恢复(S3+WebDAV) 2. 修复WAL/启动顺序 3. 新增配置文件监控日志
5
  # ==============================================================================
6
 
7
  DATA_DIR="."
8
  DB_FILE="landppt.db"
9
+ # 定义要监控和备份的配置文件
10
+ CONFIG_FILES=".env"
11
+
12
  SYNC_INTERVAL="${SYNC_INTERVAL:-600}"
13
  BACKUP_KEEP="${BACKUP_KEEP:-24}"
14
  TIMEOUT_RESTORE="120"
15
  TIMEOUT_CMD="180"
16
 
 
17
  S3_REGION="${S3_REGION:-auto}"
18
  S3_2_REGION="${S3_2_REGION:-auto}"
19
 
 
29
  fi
30
  }
31
 
32
+ # ----------------- 基础工具函数 (完整保留) -----------------
33
# Availability checks: a backup backend is usable only when ALL of its credentials are set.
has_webdav() { [[ -n "$WEBDAV_URL" ]] && [[ -n "$WEBDAV_USERNAME" ]] && [[ -n "$WEBDAV_PASSWORD" ]]; }
has_s3()     { [[ -n "$S3_ENDPOINT_URL" ]] && [[ -n "$S3_BUCKET" ]] && [[ -n "$S3_ACCESS_KEY_ID" ]]; }
has_s3_2()   { [[ -n "$S3_2_ENDPOINT_URL" ]] && [[ -n "$S3_2_BUCKET" ]] && [[ -n "$S3_2_ACCESS_KEY_ID" ]]; }
36
 
 
37
  get_webdav_url() {
38
  local file="$1"
39
  local base="${WEBDAV_URL%/}"
 
61
  -o "$dl_path" "$(get_webdav_url "$file")"
62
  }
63
 
 
64
# Print the name of the newest landppt backup archive in the given S3 bucket
# (lexicographic sort works because names embed a YYYYmmdd_HHMMSS timestamp).
# Arguments: $1 endpoint URL, $2 bucket, $3 access key, $4 secret key, $5 region.
# Outputs: the newest "landppt_backup_*.tar.gz" object name, or nothing if none found.
get_s3_latest_name() {
    local endpoint="$1" bucket="$2" access_key="$3" secret_key="$4" region="$5"
    # NOTE: these exports deliberately persist in the script's environment,
    # matching the original behavior relied on by later aws invocations.
    export AWS_ACCESS_KEY_ID="$access_key"
    export AWS_SECRET_ACCESS_KEY="$secret_key"
    export AWS_DEFAULT_REGION="$region"
    run_with_timeout 30 aws --endpoint-url "$endpoint" --region "$region" s3 ls "s3://$bucket/" 2>/dev/null |
        awk '{print $4}' |
        grep 'landppt_backup_.*\.tar\.gz$' |
        sort |
        tail -n 1
}
 
76
  export AWS_ACCESS_KEY_ID="$ACCESS"
77
  export AWS_SECRET_ACCESS_KEY="$SECRET"
78
  export AWS_DEFAULT_REGION="$REGION"
 
79
  rm -f "$DL_PATH"
80
  if run_with_timeout "$TIMEOUT_RESTORE" aws --endpoint-url "$ENDPOINT" --region "$REGION" s3 cp "s3://$BUCKET/$FILE" "$DL_PATH" --quiet; then
81
  [ -s "$DL_PATH" ] && return 0
 
83
  return 1
84
  }
85
 
86
+ # ----------------- 解压/打包逻辑 -----------------
87
+
88
+ extract_data() {
89
  local tar_path="$1"
90
  mkdir -p "$DATA_DIR"
91
 
92
  # 尝试解压
93
  if tar -xzf "$tar_path" -C "$DATA_DIR" 2>/dev/null; then
94
+ # 验证数据库
95
  if ls "$DATA_DIR"/landppt.db* 1> /dev/null 2>&1; then
96
  return 0
97
  fi
98
 
99
+ # 兼容旧路径查找
100
  local found=$(find "$DATA_DIR" -name "landppt.db" -type f | head -n 1)
101
  if [ -n "$found" ] && [ "$found" != "$DATA_DIR/$DB_FILE" ]; then
102
  mv "$found" "$DATA_DIR/$DB_FILE"
 
103
  [ -f "${found}-shm" ] && mv "${found}-shm" "$DATA_DIR/${DB_FILE}-shm"
104
  [ -f "${found}-wal" ] && mv "${found}-wal" "$DATA_DIR/${DB_FILE}-wal"
105
  return 0
 
109
  }
110
 
111
  # ==============================================================================
112
+ # 阶段 1: 恢复流程 (阻塞应用启动)
113
  # ==============================================================================
114
 
115
+ log ">>> 启动初始化检查..."
116
 
117
  if [ -f "$DATA_DIR/$DB_FILE" ] && [ -s "$DATA_DIR/$DB_FILE" ]; then
118
  log "本地数据库已存在,跳过恢复。"
119
  else
 
120
  CANDIDATES_FILE="/tmp/backup_candidates.txt"
121
  > "$CANDIDATES_FILE"
122
 
 
123
  if has_s3; then
124
+ F=$(get_s3_latest_name "$S3_ENDPOINT_URL" "$S3_BUCKET" "$S3_ACCESS_KEY_ID" "$S3_SECRET_ACCESS_KEY" "$S3_REGION")
125
+ [ -n "$F" ] && echo "$F S3_MAIN" >> "$CANDIDATES_FILE" && log "发现 S3(主): $F"
126
  fi
 
 
127
  if has_s3_2; then
128
+ F=$(get_s3_latest_name "$S3_2_ENDPOINT_URL" "$S3_2_BUCKET" "$S3_2_ACCESS_KEY_ID" "$S3_2_SECRET_ACCESS_KEY" "$S3_2_REGION")
129
+ [ -n "$F" ] && echo "$F S3_SEC" >> "$CANDIDATES_FILE" && log "发现 S3(备): $F"
130
  fi
 
 
131
  if has_webdav; then
132
+ F=$(get_webdav_latest_name)
133
+ [ -n "$F" ] && echo "$F WEBDAV" >> "$CANDIDATES_FILE" && log "发现 WebDAV: $F"
134
  fi
135
 
 
136
  BEST_LINE=$(sort -r "$CANDIDATES_FILE" | head -n 1)
137
 
138
  if [ -n "$BEST_LINE" ]; then
 
140
  SOURCE_TYPE=$(echo "$BEST_LINE" | awk '{print $2}')
141
  DL_FILE="/tmp/restore.tar.gz"
142
 
143
+ log ">>> 正在恢复: $TARGET_FILE (来源: $SOURCE_TYPE)"
144
  SUCCESS=0
145
 
146
  case "$SOURCE_TYPE" in
147
+ "S3_MAIN") download_s3_file "$S3_ENDPOINT_URL" "$S3_BUCKET" "$S3_ACCESS_KEY_ID" "$S3_SECRET_ACCESS_KEY" "$S3_REGION" "$TARGET_FILE" "$DL_FILE" && SUCCESS=1 ;;
148
+ "S3_SEC") download_s3_file "$S3_2_ENDPOINT_URL" "$S3_2_BUCKET" "$S3_2_ACCESS_KEY_ID" "$S3_2_SECRET_ACCESS_KEY" "$S3_2_REGION" "$TARGET_FILE" "$DL_FILE" && SUCCESS=1 ;;
149
+ "WEBDAV") download_webdav_file "$TARGET_FILE" "$DL_FILE" && SUCCESS=1 ;;
 
 
 
 
 
 
150
  esac
151
 
152
+ if [ $SUCCESS -eq 1 ] && extract_data "$DL_FILE"; then
153
+ log "✅ 恢复成功!"
154
  else
155
+ log "❌ 恢复失败"
156
  fi
157
  rm -f "$DL_FILE"
158
  else
 
161
  rm -f "$CANDIDATES_FILE"
162
  fi
163
 
164
+ log ">>> 初始化结束,应用正在启动..."
165
 
166
  # ==============================================================================
167
+ # 阶段 2: 后台备份循环 (含诊断日志)
168
  # ==============================================================================
169
  (
 
170
  sleep 60
 
171
  while true; do
172
+
173
# --- 🔎 诊断部分开始 ---
# Periodic diagnostic: log whether the .env config file exists, its mtime/size,
# and whether it contains an API_KEY entry. Only key names are checked — secret
# values are never written to the log.
log "--- 开始周期性检查 ---"
if [ -f "$DATA_DIR/.env" ]; then
    # 'stat -c' is GNU/busybox-specific; the original 'command -v stat' check did
    # not catch a stat lacking -c (BSD/macOS), leaking stderr and leaving F_INFO
    # empty. Suppress stderr and fall back on any failure or empty output instead.
    F_INFO=$(stat -c "%y (大小: %s)" "$DATA_DIR/.env" 2>/dev/null) || F_INFO="存在"
    [ -n "$F_INFO" ] || F_INFO="存在"
    log "🔎 发现 .env 文件: $F_INFO"

    # Presence check only, to avoid leaking the API key value into logs
    if grep -q "API_KEY" "$DATA_DIR/.env"; then
        log "🔎 .env 文件包含 API_KEY 配置项"
    else
        log "⚠️ .env 文件可能为空或未包含 API_KEY"
    fi
else
    log "⚠️ .env 文件不存在"
fi
# --- 🔎 诊断部分结束 ---
193
+
194
+ # 只要存在数据库文件,就执行备份
195
  if [ -f "$DATA_DIR/$DB_FILE" ]; then
196
  TS=$(date +%Y%m%d_%H%M%S)
197
  BACKUP_NAME="landppt_backup_${TS}.tar.gz"
198
  TMP_BAK="/tmp/$BACKUP_NAME"
199
 
200
# --- 打包 (包含 .env 如果存在) ---
# Archive every SQLite file (landppt.db, -wal, -shm) plus each configured
# config file that exists, into $TMP_BAK. Runs in a subshell so the cd does
# not affect the rest of the loop.
(
    # Guard the cd: the original ignored failure and would have tarred files
    # from the wrong directory.
    cd "$DATA_DIR" || exit 1
    # Build the file list as positional parameters instead of a space-joined
    # string, so filenames containing spaces survive word-splitting intact.
    set -- landppt.db*
    for cf in $CONFIG_FILES; do
        [ -f "$cf" ] && set -- "$@" "$cf"
    done
    # tar exit status 1 means "file changed while reading" — acceptable when
    # archiving a live database, so treat it as success.
    tar -czf "$TMP_BAK" "$@" >/dev/null 2>&1 || [ $? -eq 1 ]
)
212
 
213
+ # --- 1. WebDAV 备份 ---
214
  if has_webdav; then
215
  UPLOAD_URL=$(get_webdav_url "$BACKUP_NAME")
216
  if run_with_timeout "$TIMEOUT_CMD" curl -s -f --connect-timeout 15 \
217
  -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" \
218
  -T "$TMP_BAK" "$UPLOAD_URL" >/dev/null 2>&1; then
219
 
220
+ # WebDAV 清理
221
  LIST_URL=$(get_webdav_url "")
222
  ALL_FILES=$(curl -s -X PROPFIND -H "Depth: 1" -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" --connect-timeout 15 "$LIST_URL" \
223
  | grep -o 'landppt_backup_[0-9_]*\.tar\.gz' | sort -u | sort)
 
224
  COUNT=$(echo "$ALL_FILES" | grep -c .)
225
  if [ "$COUNT" -gt "$BACKUP_KEEP" ]; then
226
  DEL_COUNT=$(($COUNT - $BACKUP_KEEP))
227
  echo "$ALL_FILES" | head -n "$DEL_COUNT" | while read -r F; do
228
+ [ -n "$F" ] && curl -s -X DELETE -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" "$(get_webdav_url "$F")" >/dev/null 2>&1
 
 
 
229
  done
230
  fi
 
 
231
  fi
232
  fi
233
 
234
+ # --- 2. S3 (主) 备份 ---
235
  if has_s3; then
236
  export AWS_ACCESS_KEY_ID="$S3_ACCESS_KEY_ID"
237
  export AWS_SECRET_ACCESS_KEY="$S3_SECRET_ACCESS_KEY"
238
  export AWS_DEFAULT_REGION="$S3_REGION"
239
 
240
  if run_with_timeout "$TIMEOUT_CMD" aws --endpoint-url "$S3_ENDPOINT_URL" --region "$S3_REGION" s3 cp "$TMP_BAK" "s3://$S3_BUCKET/$BACKUP_NAME" --quiet >/dev/null 2>&1; then
241
+ # S3 清理
242
  FILES=$(aws --endpoint-url "$S3_ENDPOINT_URL" --region "$S3_REGION" s3 ls "s3://$S3_BUCKET/" 2>/dev/null | awk '{print $4}' | grep 'landppt_backup_' | sort)
243
  COUNT=$(echo "$FILES" | grep -c .)
244
  if [ "$COUNT" -gt "$BACKUP_KEEP" ]; then
 
247
  [ -n "$F" ] && aws --endpoint-url "$S3_ENDPOINT_URL" --region "$S3_REGION" s3 rm "s3://$S3_BUCKET/$F" --quiet
248
  done
249
  fi
 
 
250
  fi
251
  fi
252
 
253
+ # --- 3. S3 (备) 备份 ---
254
  if has_s3_2; then
255
  export AWS_ACCESS_KEY_ID="$S3_2_ACCESS_KEY_ID"
256
  export AWS_SECRET_ACCESS_KEY="$S3_2_SECRET_ACCESS_KEY"
257
  export AWS_DEFAULT_REGION="$S3_2_REGION"
258
+ run_with_timeout "$TIMEOUT_CMD" aws --endpoint-url "$S3_2_ENDPOINT_URL" --region "$S3_2_REGION" s3 cp "$TMP_BAK" "s3://$S3_2_BUCKET/$BACKUP_NAME" --quiet >/dev/null 2>&1
259
+ # S3 清理逻辑
 
260
  FILES=$(aws --endpoint-url "$S3_2_ENDPOINT_URL" --region "$S3_2_REGION" s3 ls "s3://$S3_2_BUCKET/" 2>/dev/null | awk '{print $4}' | grep 'landppt_backup_' | sort)
261
  COUNT=$(echo "$FILES" | grep -c .)
262
  if [ "$COUNT" -gt "$BACKUP_KEEP" ]; then
 
271
  fi
272
 
273
  rm -f "$TMP_BAK"
274
+ log "备份完成: $BACKUP_NAME"
275
  fi
 
276
  sleep "$SYNC_INTERVAL"
277
  done
278
  ) &
279
 
280
+ exit 0```