File size: 10,670 Bytes
ea2978f
 
14a7e64
 
ea2978f
 
3875680
ea89910
14a7e64
7d22844
a3f3b7f
ea2978f
 
3875680
 
ea2978f
b75ef2e
 
 
ea2978f
 
 
 
 
 
ea89910
ea2978f
 
 
 
 
7d22844
250aae0
 
 
 
 
 
 
 
 
ea89910
ea2978f
 
250aae0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ea89910
250aae0
 
ea2978f
b75ef2e
ea2978f
 
b75ef2e
 
3875680
ea2978f
 
3875680
b75ef2e
 
3875680
 
b75ef2e
3875680
250aae0
 
3875680
250aae0
3875680
 
7d22844
a3f3b7f
 
250aae0
 
0d068a3
14a7e64
250aae0
7d22844
14a7e64
ea89910
0d068a3
 
 
14a7e64
250aae0
ea89910
 
 
 
0d068a3
250aae0
 
 
ea2978f
 
250aae0
14a7e64
250aae0
ea2978f
14a7e64
0d068a3
14a7e64
ea89910
14a7e64
ea2978f
 
 
 
 
a3f3b7f
 
ea2978f
 
a3f3b7f
 
ea2978f
 
a3f3b7f
 
ea2978f
 
 
ad66826
ea2978f
 
 
 
b75ef2e
a3f3b7f
b75ef2e
ad66826
ea2978f
a3f3b7f
 
 
ea2978f
 
a3f3b7f
14a7e64
b1accd3
a3f3b7f
ea2978f
b75ef2e
ea2978f
ea89910
ea2978f
 
 
 
a3f3b7f
0d068a3
250aae0
7d22844
250aae0
ea2978f
0d068a3
ea2978f
ea89910
ea2978f
ad66826
ea2978f
ad66826
14a7e64
a3f3b7f
 
7d22844
a3f3b7f
7d22844
 
 
a3f3b7f
7d22844
a3f3b7f
ad66826
14a7e64
3875680
250aae0
 
3875680
250aae0
 
a3f3b7f
0d068a3
 
 
 
 
ea89910
0d068a3
a3f3b7f
0d068a3
 
250aae0
3875680
 
14a7e64
3875680
 
 
b75ef2e
ea2978f
b75ef2e
a3f3b7f
b75ef2e
3875680
b1accd3
ea89910
 
b75ef2e
b1accd3
 
ea2978f
 
3875680
14a7e64
3875680
 
 
b75ef2e
7d22844
 
14a7e64
ea89910
 
 
 
 
 
 
 
 
3875680
 
 
14a7e64
ea2978f
0d068a3
ea2978f
0d068a3
 
7d22844
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
#!/bin/bash
# ==============================================================================
# landppt — final production version
# Features: full backup (data + config) | dual S3 + WebDAV | auto-pruning |
#           startup protection (restore blocks app start; local data wins)
# ==============================================================================

DATA_DIR="."
DB_FILE="landppt.db"
# Key point: back up the system config files together with the database.
CONFIG_FILES=".env config.json config.yaml"

SYNC_INTERVAL="${SYNC_INTERVAL:-600}"   # seconds between backup cycles
BACKUP_KEEP="${BACKUP_KEEP:-24}"        # number of remote backups to retain
TIMEOUT_RESTORE="120"                   # seconds allowed per restore download
TIMEOUT_CMD="180"                       # seconds allowed per upload command

S3_REGION="${S3_REGION:-auto}"
S3_2_REGION="${S3_2_REGION:-auto}"
log() { echo "[Backup] $(date '+%Y-%m-%d %H:%M:%S') $*"; }

# Run a command under a time limit when timeout(1) is available.
# $1: limit in seconds; remaining args: the command line to execute.
# Returns the command's (or timeout's) exit status.
run_with_timeout() {
    local limit="$1"
    shift
    if command -v timeout >/dev/null; then
        timeout "$limit" "$@"
    else
        # No timeout(1) on this system: best effort, run unbounded.
        "$@"
    fi
}

# ----------------- Basic helper predicates -----------------
# Each succeeds only when the corresponding backend is fully configured
# (all of its required environment variables are non-empty).
has_webdav() {
    [[ -n "$WEBDAV_URL" && -n "$WEBDAV_USERNAME" && -n "$WEBDAV_PASSWORD" ]]
}
has_s3() {
    [[ -n "$S3_ENDPOINT_URL" && -n "$S3_BUCKET" && -n "$S3_ACCESS_KEY_ID" ]]
}
has_s3_2() {
    [[ -n "$S3_2_ENDPOINT_URL" && -n "$S3_2_BUCKET" && -n "$S3_2_ACCESS_KEY_ID" ]]
}

# Build the full WebDAV URL for a backup file name ($1).
# Honors the optional WEBDAV_BACKUP_PATH sub-directory; normalizes
# leading/trailing slashes so the result never contains "//".
get_webdav_url() {
    local name="$1"
    local root="${WEBDAV_URL%/}"
    local dir="${WEBDAV_BACKUP_PATH#/}"
    dir="${dir%/}"
    if [ -n "$dir" ]; then
        echo "$root/$dir/$name"
    else
        echo "$root/$name"
    fi
}

# List the WebDAV backup directory (PROPFIND, Depth 1) and print the
# lexically newest backup name — names embed YYYYMMDD_HHMMSS, so lexical
# order is chronological order.
# Outputs: newest "landppt_backup_*.tar.gz" name, or nothing on failure.
get_webdav_latest_name() {
    # Split declaration from assignment so the substitution's status
    # isn't masked by `local` (and fix the redundant `sort -u | sort`:
    # `sort -u` already produces sorted output).
    local url
    url=$(get_webdav_url "")
    run_with_timeout 30 curl -s -X PROPFIND -H "Depth: 1" \
        -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" \
        --connect-timeout 15 "$url" \
        | grep -o 'landppt_backup_[0-9_]*\.tar\.gz' \
        | sort -u | tail -n 1
}

# Download one backup file from WebDAV.
# $1: remote file name; $2: local destination path.
# Returns curl's exit status (non-zero on HTTP or transport failure).
download_webdav_file() {
    local remote="$1"
    local dest="$2"
    log "从 WebDAV 下载: $remote ..."
    run_with_timeout "$TIMEOUT_RESTORE" curl -s -f -L \
        -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" \
        --connect-timeout 15 \
        -o "$dest" "$(get_webdav_url "$remote")"
}

# Print the lexically newest backup object name in an S3 bucket
# (names embed YYYYMMDD_HHMMSS, so lexical max == newest).
# $1 endpoint URL, $2 bucket, $3 access key, $4 secret key, $5 region.
# Fix: pass credentials as per-invocation environment assignments instead
# of `export`, so calling this helper no longer clobbers AWS_* variables
# in the caller's environment as a side effect.
get_s3_latest_name() {
    local ENDPOINT="$1" BUCKET="$2" ACCESS="$3" SECRET="$4" REGION="$5"
    AWS_ACCESS_KEY_ID="$ACCESS" \
    AWS_SECRET_ACCESS_KEY="$SECRET" \
    AWS_DEFAULT_REGION="$REGION" \
    run_with_timeout 30 aws --endpoint-url "$ENDPOINT" --region "$REGION" s3 ls "s3://$BUCKET/" 2>/dev/null \
        | awk '{print $4}' | grep 'landppt_backup_.*\.tar\.gz$' | sort | tail -n 1
}

# Download one backup object from S3 into a local path.
# $1 endpoint, $2 bucket, $3 access key, $4 secret key, $5 region,
# $6 object name, $7 local destination path.
# Returns 0 only when the copy succeeded AND the file is non-empty.
download_s3_file() {
    local ENDPOINT="$1" BUCKET="$2" ACCESS="$3" SECRET="$4" REGION="$5" FILE="$6" DL_PATH="$7"
    log "从 S3 下载: $FILE (Region: $REGION)..."
    export AWS_ACCESS_KEY_ID="$ACCESS"
    export AWS_SECRET_ACCESS_KEY="$SECRET"
    export AWS_DEFAULT_REGION="$REGION"
    rm -f "$DL_PATH"
    run_with_timeout "$TIMEOUT_RESTORE" aws --endpoint-url "$ENDPOINT" --region "$REGION" \
        s3 cp "s3://$BUCKET/$FILE" "$DL_PATH" --quiet || return 1
    # An empty download counts as failure.
    [ -s "$DL_PATH" ]
}

# ----------------- Unpack / package core logic -----------------

# Extract a backup archive ($1) into $DATA_DIR and ensure the database
# file ends up at $DATA_DIR/$DB_FILE.
# Returns 0 when the database is in place, 1 on extraction failure or
# when no database file can be found in the archive.
extract_data() {
    local tar_path="$1"
    mkdir -p "$DATA_DIR"

    # Unpack everything (database + config such as .env).
    tar -xzf "$tar_path" -C "$DATA_DIR" 2>/dev/null || return 1

    # Success if the database landed in the data root. Uses a glob test
    # instead of the original `ls glob` anti-pattern (SC2012).
    local f
    for f in "$DATA_DIR"/landppt.db*; do
        [ -e "$f" ] && return 0
    done

    # Compatibility fix: the archive may nest the DB deeper; relocate it
    # (and its SQLite -shm/-wal sidecars) to the data root.
    local found
    found=$(find "$DATA_DIR" -name "landppt.db" -type f | head -n 1)
    if [ -n "$found" ] && [ "$found" != "$DATA_DIR/$DB_FILE" ]; then
        mv "$found" "$DATA_DIR/$DB_FILE"
        [ -f "${found}-shm" ] && mv "${found}-shm" "$DATA_DIR/${DB_FILE}-shm"
        [ -f "${found}-wal" ] && mv "${found}-wal" "$DATA_DIR/${DB_FILE}-wal"
        return 0
    fi
    return 1
}

# ==============================================================================
# Phase 1: restore flow (runs inline, blocking application startup)
# ==============================================================================

log ">>> 启动初始化..."

# If a local database already exists, skip the restore (protects live data).
if [ -f "$DATA_DIR/$DB_FILE" ] && [ -s "$DATA_DIR/$DB_FILE" ]; then
    log "本地数据已存在,跳过恢复。"
else
    # Collect the newest backup name from every configured backend,
    # one "<filename> <source-tag>" line per backend.
    CANDIDATES_FILE="/tmp/backup_candidates.txt"
    > "$CANDIDATES_FILE"

    if has_s3; then
        F=$(get_s3_latest_name "$S3_ENDPOINT_URL" "$S3_BUCKET" "$S3_ACCESS_KEY_ID" "$S3_SECRET_ACCESS_KEY" "$S3_REGION")
        [ -n "$F" ] && echo "$F S3_MAIN" >> "$CANDIDATES_FILE" && log "发现 S3(主): $F"
    fi
    if has_s3_2; then
        F=$(get_s3_latest_name "$S3_2_ENDPOINT_URL" "$S3_2_BUCKET" "$S3_2_ACCESS_KEY_ID" "$S3_2_SECRET_ACCESS_KEY" "$S3_2_REGION")
        [ -n "$F" ] && echo "$F S3_SEC" >> "$CANDIDATES_FILE" && log "发现 S3(备): $F"
    fi
    if has_webdav; then
        F=$(get_webdav_latest_name)
        [ -n "$F" ] && echo "$F WEBDAV" >> "$CANDIDATES_FILE" && log "发现 WebDAV: $F"
    fi

    # Backup names embed YYYYMMDD_HHMMSS, so a reverse sort puts the
    # newest backup first (the source tag acts as a tie-breaker).
    BEST_LINE=$(sort -r "$CANDIDATES_FILE" | head -n 1)
    
    if [ -n "$BEST_LINE" ]; then
        TARGET_FILE=$(echo "$BEST_LINE" | awk '{print $1}')
        SOURCE_TYPE=$(echo "$BEST_LINE" | awk '{print $2}')
        DL_FILE="/tmp/restore.tar.gz"
        
        log ">>> 正在恢复: $TARGET_FILE (来源: $SOURCE_TYPE)"
        SUCCESS=0
        
        # Download from whichever backend advertised the chosen backup.
        case "$SOURCE_TYPE" in
            "S3_MAIN") download_s3_file "$S3_ENDPOINT_URL" "$S3_BUCKET" "$S3_ACCESS_KEY_ID" "$S3_SECRET_ACCESS_KEY" "$S3_REGION" "$TARGET_FILE" "$DL_FILE" && SUCCESS=1 ;;
            "S3_SEC")  download_s3_file "$S3_2_ENDPOINT_URL" "$S3_2_BUCKET" "$S3_2_ACCESS_KEY_ID" "$S3_2_SECRET_ACCESS_KEY" "$S3_2_REGION" "$TARGET_FILE" "$DL_FILE" && SUCCESS=1 ;;
            "WEBDAV")  download_webdav_file "$TARGET_FILE" "$DL_FILE" && SUCCESS=1 ;;
        esac
        
        # NOTE: a failed restore is logged but does not abort startup —
        # the app still launches as a fresh instance.
        if [ $SUCCESS -eq 1 ] && extract_data "$DL_FILE"; then
            log "✅ 恢复成功!(数据与配置已就绪)"
        else
            log "❌ 恢复失败"
        fi
        rm -f "$DL_FILE"
    else
        log "未找到备份,启动全新实例。"
    fi
    rm -f "$CANDIDATES_FILE"
fi

log ">>> 初始化结束,应用正在启动..."

# ==============================================================================
# Phase 2: background backup loop (detached subshell; main script exits)
# ==============================================================================
(
    # Give the application a minute to start before the first backup.
    sleep 60
    while true; do
        if [ -f "$DATA_DIR/$DB_FILE" ]; then
            TS=$(date +%Y%m%d_%H%M%S)
            BACKUP_NAME="landppt_backup_${TS}.tar.gz"
            TMP_BAK="/tmp/$BACKUP_NAME"
            
            # --- Packaging (bundle DB and any config files that exist) ---
            (
                cd "$DATA_DIR"
                # Intentionally left unquoted at the tar call below so the
                # glob expands — this also picks up SQLite -wal/-shm files.
                FILES_TO_BACKUP="landppt.db*"
                for cf in $CONFIG_FILES; do
                    if [ -f "$cf" ]; then
                        FILES_TO_BACKUP="$FILES_TO_BACKUP $cf"
                    fi
                done
                # `|| [ $? -eq 1 ]` deliberately tolerates tar exit code 1
                # (file changed while being read — expected for a live DB).
                tar -czf "$TMP_BAK" $FILES_TO_BACKUP >/dev/null 2>&1 || [ $? -eq 1 ]
            )
            
            # --- 1. WebDAV upload + retention pruning ---
            if has_webdav; then
                UPLOAD_URL=$(get_webdav_url "$BACKUP_NAME")
                if run_with_timeout "$TIMEOUT_CMD" curl -s -f --connect-timeout 15 \
                    -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" \
                    -T "$TMP_BAK" "$UPLOAD_URL" >/dev/null 2>&1; then
                    
                    # Prune only after a successful upload: list all backups
                    # (oldest first) and delete any beyond BACKUP_KEEP.
                    LIST_URL=$(get_webdav_url "")
                    ALL_FILES=$(curl -s -X PROPFIND -H "Depth: 1" -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" --connect-timeout 15 "$LIST_URL" \
                        | grep -o 'landppt_backup_[0-9_]*\.tar\.gz' | sort -u | sort)
                    COUNT=$(echo "$ALL_FILES" | grep -c .)
                    if [ "$COUNT" -gt "$BACKUP_KEEP" ]; then
                        DEL_COUNT=$(($COUNT - $BACKUP_KEEP))
                        echo "$ALL_FILES" | head -n "$DEL_COUNT" | while read -r F; do
                            [ -n "$F" ] && curl -s -X DELETE -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" "$(get_webdav_url "$F")" >/dev/null 2>&1
                        done
                    fi
                fi
            fi
            
            # --- 2. S3 (primary) upload + retention pruning ---
            if has_s3; then
                export AWS_ACCESS_KEY_ID="$S3_ACCESS_KEY_ID"
                export AWS_SECRET_ACCESS_KEY="$S3_SECRET_ACCESS_KEY"
                export AWS_DEFAULT_REGION="$S3_REGION"
                
                if run_with_timeout "$TIMEOUT_CMD" aws --endpoint-url "$S3_ENDPOINT_URL" --region "$S3_REGION" s3 cp "$TMP_BAK" "s3://$S3_BUCKET/$BACKUP_NAME" --quiet >/dev/null 2>&1; then
                    # Prune the oldest objects beyond the retention count.
                    FILES=$(aws --endpoint-url "$S3_ENDPOINT_URL" --region "$S3_REGION" s3 ls "s3://$S3_BUCKET/" 2>/dev/null | awk '{print $4}' | grep 'landppt_backup_' | sort)
                    COUNT=$(echo "$FILES" | grep -c .)
                    if [ "$COUNT" -gt "$BACKUP_KEEP" ]; then
                        DEL=$(($COUNT - $BACKUP_KEEP))
                        echo "$FILES" | head -n "$DEL" | while read -r F; do
                            [ -n "$F" ] && aws --endpoint-url "$S3_ENDPOINT_URL" --region "$S3_REGION" s3 rm "s3://$S3_BUCKET/$F" --quiet
                        done
                    fi
                fi
            fi

            # --- 3. S3 (secondary) upload + retention pruning ---
            if has_s3_2; then
                export AWS_ACCESS_KEY_ID="$S3_2_ACCESS_KEY_ID"
                export AWS_SECRET_ACCESS_KEY="$S3_2_SECRET_ACCESS_KEY"
                export AWS_DEFAULT_REGION="$S3_2_REGION"
                
                if run_with_timeout "$TIMEOUT_CMD" aws --endpoint-url "$S3_2_ENDPOINT_URL" --region "$S3_2_REGION" s3 cp "$TMP_BAK" "s3://$S3_2_BUCKET/$BACKUP_NAME" --quiet >/dev/null 2>&1; then
                    # Prune the oldest objects beyond the retention count.
                    FILES=$(aws --endpoint-url "$S3_2_ENDPOINT_URL" --region "$S3_2_REGION" s3 ls "s3://$S3_2_BUCKET/" 2>/dev/null | awk '{print $4}' | grep 'landppt_backup_' | sort)
                    COUNT=$(echo "$FILES" | grep -c .)
                    if [ "$COUNT" -gt "$BACKUP_KEEP" ]; then
                        DEL=$(($COUNT - $BACKUP_KEEP))
                        echo "$FILES" | head -n "$DEL" | while read -r F; do
                            [ -n "$F" ] && aws --endpoint-url "$S3_2_ENDPOINT_URL" --region "$S3_2_REGION" s3 rm "s3://$S3_2_BUCKET/$F" --quiet
                        done
                    fi
                fi
            fi
            
            rm -f "$TMP_BAK"
            log "备份完成: $BACKUP_NAME"
        fi
        sleep "$SYNC_INTERVAL"
    done
) &

# Hand control back to the caller; the backup loop keeps running detached.
exit 0