|
|
#!/bin/bash
# NOTE(review): this shebang must stay the very first line of the file.
#
# landppt backup/restore entrypoint: on first start, restores the newest
# backup found on WebDAV / S3; afterwards runs a background loop that
# periodically archives the database and config files to every remote.

# Directory holding the application data (database + config files).
DATA_DIR="."

# SQLite database file name (its -wal/-shm companions ride along).
DB_FILE="landppt.db"

# Optional config files included in each backup when present.
CONFIG_FILES=".env config.json config.yaml"

# Seconds between periodic backups (env-overridable).
SYNC_INTERVAL="${SYNC_INTERVAL:-600}"

# Number of newest archives kept on each remote (env-overridable).
BACKUP_KEEP="${BACKUP_KEEP:-24}"

# Timeout (seconds) for restore downloads. Env-overridable for parity
# with SYNC_INTERVAL/BACKUP_KEEP (was previously hard-coded).
TIMEOUT_RESTORE="${TIMEOUT_RESTORE:-120}"

# Timeout (seconds) for upload/maintenance commands (env-overridable).
TIMEOUT_CMD="${TIMEOUT_CMD:-180}"

# Default regions for the primary and secondary S3 targets.
S3_REGION="${S3_REGION:-auto}"

S3_2_REGION="${S3_2_REGION:-auto}"
|
|
|
|
|
# Timestamped logger; all backup-related output goes through here.
log() {
  printf '[Backup] %s %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$*"
}
|
|
|
|
|
# Run a command under a time limit when the `timeout` utility is
# available; otherwise run it directly (no limit). The command's own
# exit status is propagated either way.
run_with_timeout() {
  local limit="$1"
  shift
  if command -v timeout >/dev/null; then
    timeout "$limit" "$@"
  else
    "$@"
  fi
}
|
|
|
|
|
|
|
|
# True when all three WebDAV connection settings are non-empty.
has_webdav() {
  [[ -n "$WEBDAV_URL" ]] && [[ -n "$WEBDAV_USERNAME" ]] && [[ -n "$WEBDAV_PASSWORD" ]]
}
|
|
# True when the primary S3 target is configured (endpoint, bucket, key).
has_s3() {
  [[ -n "$S3_ENDPOINT_URL" ]] && [[ -n "$S3_BUCKET" ]] && [[ -n "$S3_ACCESS_KEY_ID" ]]
}
|
|
# True when the secondary S3 target is configured (endpoint, bucket, key).
has_s3_2() {
  [[ -n "$S3_2_ENDPOINT_URL" ]] && [[ -n "$S3_2_BUCKET" ]] && [[ -n "$S3_2_ACCESS_KEY_ID" ]]
}
|
|
|
|
|
# Build the full WebDAV URL for a backup file, honoring the optional
# WEBDAV_BACKUP_PATH sub-directory. Pass "" to get the directory URL.
get_webdav_url() {
  local file="$1"
  local base="${WEBDAV_URL%/}"          # strip a trailing slash
  local sub="${WEBDAV_BACKUP_PATH#/}"   # strip a leading slash
  sub="${sub%/}"                        # strip a trailing slash
  if [ -n "$sub" ]; then
    echo "$base/$sub/$file"
  else
    echo "$base/$file"
  fi
}
|
|
|
|
|
# List the WebDAV backup directory (PROPFIND, depth 1) and print the
# newest archive name, or nothing when none exist. Timestamped names
# sort correctly lexicographically (YYYYMMDD_HHMMSS).
get_webdav_latest_name() {
  # Split declaration from assignment so the substitution's exit status
  # is not masked by `local`.
  local url
  url=$(get_webdav_url "")
  # `sort -u` already emits sorted output; the former extra `| sort`
  # was redundant and has been dropped.
  run_with_timeout 30 curl -s -X PROPFIND -H "Depth: 1" \
    -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" \
    --connect-timeout 15 "$url" \
    | grep -o 'landppt_backup_[0-9_]*\.tar\.gz' \
    | sort -u | tail -n 1
}
|
|
|
|
|
# Fetch one backup archive from WebDAV into the given local path.
# Returns curl's exit status (non-zero on HTTP errors thanks to -f).
download_webdav_file() {
  local name="$1"
  local dest="$2"
  log "从 WebDAV 下载: $name ..."
  run_with_timeout "$TIMEOUT_RESTORE" curl -s -f -L \
    -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" \
    --connect-timeout 15 \
    -o "$dest" "$(get_webdav_url "$name")"
}
|
|
|
|
|
# Print the newest backup archive name in an S3 bucket (empty if none).
# Credentials are exported for the aws CLI; callers normally invoke this
# inside $(...) so the exports stay confined to the subshell.
get_s3_latest_name() {
  local endpoint="$1" bucket="$2" access_key="$3" secret_key="$4" region="$5"
  export AWS_ACCESS_KEY_ID="$access_key"
  export AWS_SECRET_ACCESS_KEY="$secret_key"
  export AWS_DEFAULT_REGION="$region"
  run_with_timeout 30 aws --endpoint-url "$endpoint" --region "$region" s3 ls "s3://$bucket/" 2>/dev/null \
    | awk '{print $4}' \
    | grep 'landppt_backup_.*\.tar\.gz$' \
    | sort | tail -n 1
}
|
|
|
|
|
# Download one backup archive from S3. Succeeds only when the copy
# completes AND the local file is non-empty; any stale file at the
# destination is removed first.
download_s3_file() {
  local endpoint="$1" bucket="$2" access_key="$3" secret_key="$4"
  local region="$5" name="$6" dest="$7"
  log "从 S3 下载: $name (Region: $region)..."
  export AWS_ACCESS_KEY_ID="$access_key"
  export AWS_SECRET_ACCESS_KEY="$secret_key"
  export AWS_DEFAULT_REGION="$region"
  rm -f "$dest"
  if ! run_with_timeout "$TIMEOUT_RESTORE" aws --endpoint-url "$endpoint" --region "$region" s3 cp "s3://$bucket/$name" "$dest" --quiet; then
    return 1
  fi
  [ -s "$dest" ]
}
|
|
|
|
|
|
|
|
|
|
|
# Unpack a backup archive into DATA_DIR and ensure the database ends up
# at $DATA_DIR/$DB_FILE. Returns 0 on success, 1 on failure.
extract_data() {
  local archive="$1"
  mkdir -p "$DATA_DIR"

  # A broken/truncated archive fails here.
  tar -xzf "$archive" -C "$DATA_DIR" 2>/dev/null || return 1

  # Fast path: the archive already placed landppt.db* at the top level.
  if ls "$DATA_DIR"/landppt.db* 1> /dev/null 2>&1; then
    return 0
  fi

  # Fallback: the database was archived under a sub-directory — move it
  # (plus its -shm/-wal side files) up to the expected location.
  local located
  located=$(find "$DATA_DIR" -name "landppt.db" -type f | head -n 1)
  if [ -n "$located" ] && [ "$located" != "$DATA_DIR/$DB_FILE" ]; then
    mv "$located" "$DATA_DIR/$DB_FILE"
    [ -f "${located}-shm" ] && mv "${located}-shm" "$DATA_DIR/${DB_FILE}-shm"
    [ -f "${located}-wal" ] && mv "${located}-wal" "$DATA_DIR/${DB_FILE}-wal"
    return 0
  fi

  return 1
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ---------------------------------------------------------------------------
# Startup restore: when no local database exists, pick the newest backup
# across all configured remotes (S3 primary/secondary, WebDAV) and restore it.
# ---------------------------------------------------------------------------
log ">>> 启动初始化..."

# A present, non-empty database means this instance already has data.
if [ -f "$DATA_DIR/$DB_FILE" ] && [ -s "$DATA_DIR/$DB_FILE" ]; then
  log "本地数据已存在,跳过恢复。"
else
  # Collect "FILENAME SOURCE_TYPE" candidate lines, one per remote that
  # has at least one backup.
  CANDIDATES_FILE="/tmp/backup_candidates.txt"
  > "$CANDIDATES_FILE"

  if has_s3; then
    F=$(get_s3_latest_name "$S3_ENDPOINT_URL" "$S3_BUCKET" "$S3_ACCESS_KEY_ID" "$S3_SECRET_ACCESS_KEY" "$S3_REGION")
    [ -n "$F" ] && echo "$F S3_MAIN" >> "$CANDIDATES_FILE" && log "发现 S3(主): $F"
  fi
  if has_s3_2; then
    F=$(get_s3_latest_name "$S3_2_ENDPOINT_URL" "$S3_2_BUCKET" "$S3_2_ACCESS_KEY_ID" "$S3_2_SECRET_ACCESS_KEY" "$S3_2_REGION")
    [ -n "$F" ] && echo "$F S3_SEC" >> "$CANDIDATES_FILE" && log "发现 S3(备): $F"
  fi
  if has_webdav; then
    F=$(get_webdav_latest_name)
    [ -n "$F" ] && echo "$F WEBDAV" >> "$CANDIDATES_FILE" && log "发现 WebDAV: $F"
  fi

  # Newest candidate wins: filenames embed YYYYMMDD_HHMMSS, so a reverse
  # lexicographic sort puts the most recent backup first.
  BEST_LINE=$(sort -r "$CANDIDATES_FILE" | head -n 1)

  if [ -n "$BEST_LINE" ]; then
    TARGET_FILE=$(echo "$BEST_LINE" | awk '{print $1}')
    SOURCE_TYPE=$(echo "$BEST_LINE" | awk '{print $2}')
    DL_FILE="/tmp/restore.tar.gz"

    log ">>> 正在恢复: $TARGET_FILE (来源: $SOURCE_TYPE)"
    SUCCESS=0

    # Dispatch the download to whichever remote supplied the winner.
    case "$SOURCE_TYPE" in
    "S3_MAIN") download_s3_file "$S3_ENDPOINT_URL" "$S3_BUCKET" "$S3_ACCESS_KEY_ID" "$S3_SECRET_ACCESS_KEY" "$S3_REGION" "$TARGET_FILE" "$DL_FILE" && SUCCESS=1 ;;
    "S3_SEC") download_s3_file "$S3_2_ENDPOINT_URL" "$S3_2_BUCKET" "$S3_2_ACCESS_KEY_ID" "$S3_2_SECRET_ACCESS_KEY" "$S3_2_REGION" "$TARGET_FILE" "$DL_FILE" && SUCCESS=1 ;;
    "WEBDAV") download_webdav_file "$TARGET_FILE" "$DL_FILE" && SUCCESS=1 ;;
    esac

    # Only a downloaded AND successfully extracted archive counts.
    if [ $SUCCESS -eq 1 ] && extract_data "$DL_FILE"; then
      log "✅ 恢复成功!(数据与配置已就绪)"
    else
      log "❌ 恢复失败"
    fi
    rm -f "$DL_FILE"
  else
    log "未找到备份,启动全新实例。"
  fi
  rm -f "$CANDIDATES_FILE"
fi

log ">>> 初始化结束,应用正在启动..."
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ---------------------------------------------------------------------------
# Background sync loop: after a 60s grace period, periodically archive the
# database (+ optional config files) and upload the archive to every
# configured remote, pruning each remote to the newest $BACKUP_KEEP files.
# Fixes vs. previous version: the tar subshell now guards its `cd` (a failed
# cd must not archive the wrong directory), and the redundant
# `sort -u | sort` in the WebDAV listing is collapsed to `sort -u`.
# ---------------------------------------------------------------------------
(
  sleep 60
  while true; do
    if [ -f "$DATA_DIR/$DB_FILE" ]; then
      TS=$(date +%Y%m%d_%H%M%S)
      BACKUP_NAME="landppt_backup_${TS}.tar.gz"
      TMP_BAK="/tmp/$BACKUP_NAME"

      # Build the archive in a subshell so the cd does not leak out.
      (
        cd "$DATA_DIR" || exit 1
        FILES_TO_BACKUP="landppt.db*"
        for cf in $CONFIG_FILES; do
          if [ -f "$cf" ]; then
            FILES_TO_BACKUP="$FILES_TO_BACKUP $cf"
          fi
        done
        # tar exit status 1 means "file changed while read" — tolerated,
        # since the live SQLite database may be written during the backup.
        tar -czf "$TMP_BAK" $FILES_TO_BACKUP >/dev/null 2>&1 || [ $? -eq 1 ]
      )

      # --- WebDAV: upload, then prune to the newest $BACKUP_KEEP ---
      if has_webdav; then
        UPLOAD_URL=$(get_webdav_url "$BACKUP_NAME")
        if run_with_timeout "$TIMEOUT_CMD" curl -s -f --connect-timeout 15 \
          -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" \
          -T "$TMP_BAK" "$UPLOAD_URL" >/dev/null 2>&1; then

          LIST_URL=$(get_webdav_url "")
          # `sort -u` already sorts; no second `sort` needed.
          ALL_FILES=$(curl -s -X PROPFIND -H "Depth: 1" -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" --connect-timeout 15 "$LIST_URL" \
            | grep -o 'landppt_backup_[0-9_]*\.tar\.gz' | sort -u)
          COUNT=$(echo "$ALL_FILES" | grep -c .)
          if [ "$COUNT" -gt "$BACKUP_KEEP" ]; then
            DEL_COUNT=$(($COUNT - $BACKUP_KEEP))
            # Oldest names sort first; delete the surplus from the top.
            echo "$ALL_FILES" | head -n "$DEL_COUNT" | while read -r F; do
              [ -n "$F" ] && curl -s -X DELETE -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" "$(get_webdav_url "$F")" >/dev/null 2>&1
            done
          fi
        fi
      fi

      # --- S3 primary: upload, then prune ---
      if has_s3; then
        export AWS_ACCESS_KEY_ID="$S3_ACCESS_KEY_ID"
        export AWS_SECRET_ACCESS_KEY="$S3_SECRET_ACCESS_KEY"
        export AWS_DEFAULT_REGION="$S3_REGION"

        if run_with_timeout "$TIMEOUT_CMD" aws --endpoint-url "$S3_ENDPOINT_URL" --region "$S3_REGION" s3 cp "$TMP_BAK" "s3://$S3_BUCKET/$BACKUP_NAME" --quiet >/dev/null 2>&1; then
          FILES=$(aws --endpoint-url "$S3_ENDPOINT_URL" --region "$S3_REGION" s3 ls "s3://$S3_BUCKET/" 2>/dev/null | awk '{print $4}' | grep 'landppt_backup_' | sort)
          COUNT=$(echo "$FILES" | grep -c .)
          if [ "$COUNT" -gt "$BACKUP_KEEP" ]; then
            DEL=$(($COUNT - $BACKUP_KEEP))
            echo "$FILES" | head -n "$DEL" | while read -r F; do
              [ -n "$F" ] && aws --endpoint-url "$S3_ENDPOINT_URL" --region "$S3_REGION" s3 rm "s3://$S3_BUCKET/$F" --quiet
            done
          fi
        fi
      fi

      # --- S3 secondary: upload, then prune ---
      if has_s3_2; then
        export AWS_ACCESS_KEY_ID="$S3_2_ACCESS_KEY_ID"
        export AWS_SECRET_ACCESS_KEY="$S3_2_SECRET_ACCESS_KEY"
        export AWS_DEFAULT_REGION="$S3_2_REGION"

        if run_with_timeout "$TIMEOUT_CMD" aws --endpoint-url "$S3_2_ENDPOINT_URL" --region "$S3_2_REGION" s3 cp "$TMP_BAK" "s3://$S3_2_BUCKET/$BACKUP_NAME" --quiet >/dev/null 2>&1; then
          FILES=$(aws --endpoint-url "$S3_2_ENDPOINT_URL" --region "$S3_2_REGION" s3 ls "s3://$S3_2_BUCKET/" 2>/dev/null | awk '{print $4}' | grep 'landppt_backup_' | sort)
          COUNT=$(echo "$FILES" | grep -c .)
          if [ "$COUNT" -gt "$BACKUP_KEEP" ]; then
            DEL=$(($COUNT - $BACKUP_KEEP))
            echo "$FILES" | head -n "$DEL" | while read -r F; do
              [ -n "$F" ] && aws --endpoint-url "$S3_2_ENDPOINT_URL" --region "$S3_2_REGION" s3 rm "s3://$S3_2_BUCKET/$F" --quiet
            done
          fi
        fi
      fi

      rm -f "$TMP_BAK"
      log "备份完成: $BACKUP_NAME"
    fi
    sleep "$SYNC_INTERVAL"
  done
) &

# NOTE(review): the script exits after launching the loop — presumably the
# surrounding entrypoint starts the application next; the backgrounded
# subshell keeps running. Confirm against the container entrypoint.
exit 0