File size: 8,484 Bytes
a2b4376
9766dcc
a2b4376
 
9766dcc
a2b4376
 
9766dcc
 
 
a2b4376
 
9766dcc
 
a2b4376
 
 
9766dcc
 
 
a2b4376
 
 
 
 
 
 
 
 
 
 
 
 
9766dcc
a2b4376
9766dcc
 
 
 
 
 
 
 
a2b4376
 
 
9766dcc
a2b4376
9766dcc
 
 
 
 
a2b4376
 
 
9766dcc
a2b4376
 
9766dcc
 
a2b4376
9766dcc
a2b4376
9766dcc
a2b4376
 
9766dcc
a2b4376
 
 
 
 
 
 
 
 
9766dcc
 
a2b4376
 
 
9766dcc
 
 
 
 
 
 
 
 
 
a2b4376
9766dcc
a2b4376
 
 
9766dcc
a2b4376
 
 
 
 
 
 
 
 
9766dcc
 
a2b4376
9766dcc
 
a2b4376
9766dcc
 
 
 
 
 
 
a2b4376
 
 
 
9766dcc
a2b4376
 
 
 
 
9766dcc
a2b4376
 
 
9766dcc
a2b4376
 
 
9766dcc
 
a2b4376
 
 
9766dcc
 
a2b4376
9766dcc
a2b4376
9766dcc
a2b4376
9766dcc
 
 
 
a2b4376
9766dcc
a2b4376
9766dcc
a2b4376
9766dcc
a2b4376
9766dcc
 
 
 
 
 
a2b4376
 
 
 
9766dcc
a2b4376
 
9766dcc
 
a2b4376
 
9766dcc
 
a2b4376
 
 
9766dcc
a2b4376
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9766dcc
a2b4376
 
9766dcc
a2b4376
 
 
9766dcc
a2b4376
 
 
 
 
 
 
9766dcc
 
a2b4376
 
9766dcc
a2b4376
9766dcc
 
a2b4376
9766dcc
a2b4376
9766dcc
 
a2b4376
9766dcc
a2b4376
9766dcc
 
a2b4376
 
9766dcc
a2b4376
9766dcc
a2b4376
9766dcc
 
 
a2b4376
9766dcc
a2b4376
 
 
 
 
 
9766dcc
 
 
 
 
a2b4376
9766dcc
 
a2b4376
9766dcc
 
 
 
 
 
a2b4376
 
 
9766dcc
 
 
a2b4376
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
#!/bin/bash

# === 日志配置 ===
# 将日志同时输出到标准输出和主进程日志,确保在Space/Docker日志中可见
log_info() {
    # Log an informational message. When PID 1's stdout is writable we write
    # there so the message shows up in the container/Space main log even when
    # this script runs as a subprocess; otherwise fall back to our own stdout.
    # Note: output goes to one destination or the other, not both.
    local msg
    msg="[WebDAV-Backup] [$(date '+%H:%M:%S')] INFO: $1"
    if [ -w /proc/1/fd/1 ]; then
        printf '%s\n' "$msg" > /proc/1/fd/1
    else
        printf '%s\n' "$msg"
    fi
}

log_error() {
    # Log an error message. Mirrors to PID 1's stdout when writable (so it is
    # visible in the container's main log); otherwise writes to this
    # process's stderr.
    local msg
    msg="[WebDAV-Backup] [$(date '+%H:%M:%S')] ERROR: $1"
    if [ -w /proc/1/fd/1 ]; then
        printf '%s\n' "$msg" > /proc/1/fd/1
    else
        printf '%s\n' "$msg" >&2
    fi
}

# === 1. Environment checks ===
# Backups are opt-in: without full WebDAV credentials we exit cleanly (status
# 0) so the container keeps running without the backup feature.
if [[ -z "$WEBDAV_URL" || -z "$WEBDAV_USERNAME" || -z "$WEBDAV_PASSWORD" ]]; then
    log_error "未启用备份功能 - 缺少 WEBDAV_URL, WEBDAV_USERNAME 或 WEBDAV_PASSWORD"
    exit 0
fi

# Normalize the endpoint: drop any trailing slash from the base URL.
WEBDAV_URL=${WEBDAV_URL%/}
# Optional backup sub-path; trim surrounding slashes so joining stays clean.
WEBDAV_BACKUP_PATH=${WEBDAV_BACKUP_PATH:-""}
WEBDAV_BACKUP_PATH=${WEBDAV_BACKUP_PATH#/}
WEBDAV_BACKUP_PATH=${WEBDAV_BACKUP_PATH%/}

# Compose the full URL used by every request below.
if [[ -n "$WEBDAV_BACKUP_PATH" ]]; then
    FULL_WEBDAV_URL="${WEBDAV_URL}/${WEBDAV_BACKUP_PATH}"
else
    FULL_WEBDAV_URL="${WEBDAV_URL}"
fi

# === 2. Directory setup ===
TEMP_DIR="/tmp/app_backup"
DATA_DIR="/home/node/app/data"

# Create both working directories and open up permissions so the app user
# and this script can both read/write them.
mkdir -p "$TEMP_DIR" "$DATA_DIR"
chmod -R 777 "$TEMP_DIR"
chmod -R 777 "$DATA_DIR"

log_info "临时目录: $TEMP_DIR"
log_info "数据目录: $DATA_DIR"
log_info "WebDAV URL: $FULL_WEBDAV_URL"

# === 3. Install dependencies ===
# python3 runs the WebDAV listing/cleanup/restore helpers below.
# apk implies an Alpine base image — presumably correct for this container.
if ! command -v python3 > /dev/null 2>&1; then
    log_info "正在安装Python..."
    apk add --no-cache python3 py3-pip
fi

# Ensure webdavclient3 and requests are present. Retry once on a transient
# failure, and — unlike the original unchecked retry — report if the second
# attempt also fails so the problem is visible in the logs.
log_info "正在安装/更新 WebDAV 依赖..."
if ! pip3 install --no-cache-dir requests webdavclient3; then
    log_error "依赖安装失败,尝试重试..."
    if ! pip3 install --no-cache-dir requests webdavclient3; then
        log_error "依赖重试安装仍然失败,备份/恢复功能可能不可用"
    fi
fi

# === 4. Connection test ===
# PROPFIND is the WebDAV "list" verb; 207 (Multi-Status) or 200 means the
# collection exists and the credentials work.
log_info "正在测试 WebDAV 连接..."
HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" -X PROPFIND "$FULL_WEBDAV_URL/")

case "$HTTP_CODE" in
    207|200)
        log_info "WebDAV 连接成功 (HTTP $HTTP_CODE)"
        ;;
    *)
        log_error "WebDAV 连接失败 (HTTP $HTTP_CODE),正在尝试创建目录..."
        # The target collection may simply not exist yet — try MKCOL once.
        MKCOL_CODE=$(curl -s -o /dev/null -w "%{http_code}" -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" -X MKCOL "$FULL_WEBDAV_URL/")
        case "$MKCOL_CODE" in
            201|200)
                log_info "远程目录创建成功"
                ;;
            *)
                log_error "无法连接也无法创建目录,请检查配置。HTTP: $MKCOL_CODE"
                # Deliberately keep going: this may be a root-collection
                # permission quirk and individual uploads could still work.
                ;;
        esac
        ;;
esac

# === 5. Helper functions ===

# upload_backup FILE_PATH FILE_NAME
# Upload one backup archive to WebDAV with curl, then prune old remote
# backups so only the newest MAX_BACKUPS (5) copies remain.
# Returns 1 if the file is missing or the upload fails; pruning errors are
# logged by the Python helper but do not fail the call.
upload_backup() {
    local file_path="$1"
    local file_name="$2"
    local file_size http_code

    if [ ! -f "$file_path" ]; then
        log_error "备份文件不存在: $file_path"
        return 1
    fi

    file_size=$(du -h "$file_path" | cut -f1)
    log_info "开始上传: $file_name ($file_size)"

    # 1. Upload with curl: -T streams the file as an HTTP PUT, which is
    # simpler and more robust here than doing it in Python.
    http_code=$(curl -s -o /dev/null -w "%{http_code}" -u "$WEBDAV_USERNAME:$WEBDAV_PASSWORD" -T "$file_path" "$FULL_WEBDAV_URL/$file_name")

    if [[ "$http_code" == "201" || "$http_code" == "204" || "$http_code" == "200" ]]; then
        log_info "上传成功 (HTTP $http_code)"
    else
        log_error "上传失败 (HTTP $http_code)"
        return 1
    fi

    # 2. Prune old remote backups (keep the newest 5). The URL and
    # credentials are handed to Python through the environment instead of
    # being interpolated into the source (the quoted 'PYEOF' delimiter
    # suppresses expansion), so quotes/backslashes in a password can no
    # longer corrupt — or inject code into — the embedded program.
    log_info "正在检查旧备份..."
    WEBDAV_ENV_URL="$FULL_WEBDAV_URL" WEBDAV_USERNAME="$WEBDAV_USERNAME" \
    WEBDAV_PASSWORD="$WEBDAV_PASSWORD" python3 - <<'PYEOF'
import os

from webdav3.client import Client

options = {
    'webdav_hostname': os.environ['WEBDAV_ENV_URL'],
    'webdav_login': os.environ['WEBDAV_USERNAME'],
    'webdav_password': os.environ['WEBDAV_PASSWORD'],
}

try:
    client = Client(options)
    files = client.list()
    # Only our own archives are eligible for deletion.
    backups = [f for f in files if f.endswith('.tar.gz') and f.startswith('app_backup_')]
    backups.sort()  # timestamped names sort chronologically

    MAX_BACKUPS = 5
    if len(backups) > MAX_BACKUPS:
        for file_name in backups[:len(backups) - MAX_BACKUPS]:
            try:
                # client.list() may return relative names; clean() takes them as-is.
                client.clean(file_name)
                print(f'已删除旧备份: {file_name}')
            except Exception as e:
                print(f'删除 {file_name} 失败: {str(e)}')
    else:
        print(f'当前备份数量 ({len(backups)}) 未超过限制,无需清理')
except Exception as e:
    # Pruning is best-effort; never fail the upload because of it.
    print(f'清理旧备份时出错: {str(e)}')
PYEOF
    return 0
}

# download_latest_backup
# Restore the newest remote backup (if any) into DATA_DIR: list the WebDAV
# collection, download the newest app_backup_*.tar.gz and unpack it over the
# data directory. A missing backup is not an error; any real failure makes
# the embedded Python exit non-zero but does not kill the script.
download_latest_backup() {
    log_info "正在检查远程备份..."

    # Shell values are passed to Python through the environment rather than
    # interpolated into the source (quoted 'PYEOF' delimiter), so special
    # characters in credentials/paths cannot break or inject into the
    # embedded program.
    WEBDAV_ENV_URL="$FULL_WEBDAV_URL" WEBDAV_USERNAME="$WEBDAV_USERNAME" \
    WEBDAV_PASSWORD="$WEBDAV_PASSWORD" TEMP_DIR="$TEMP_DIR" \
    DATA_DIR="$DATA_DIR" python3 - <<'PYEOF'
import os
import sys
import tarfile

import requests
from webdav3.client import Client

url = os.environ['WEBDAV_ENV_URL']
user = os.environ['WEBDAV_USERNAME']
password = os.environ['WEBDAV_PASSWORD']
temp_dir = os.environ['TEMP_DIR']
data_dir = os.environ['DATA_DIR']

options = {
    'webdav_hostname': url,
    'webdav_login': user,
    'webdav_password': password,
}

try:
    client = Client(options)
    # 1. List remote files and keep only our backup archives.
    files = client.list()
    backups = [f for f in files if f.endswith('.tar.gz') and f.startswith('app_backup_')]

    if not backups:
        print('未找到远程备份文件,跳过恢复')
        sys.exit(0)

    # 2. Timestamped names sort chronologically, so the max is the newest.
    latest_backup = sorted(backups)[-1]
    print(f'发现最新备份: {latest_backup}')

    # 3. Stream-download with requests to avoid holding the archive in memory.
    local_path = os.path.join(temp_dir, latest_backup)
    download_url = f'{url}/{latest_backup}'

    print(f'开始下载...')
    with requests.get(download_url, auth=(user, password), stream=True) as r:
        r.raise_for_status()
        with open(local_path, 'wb') as f:
            for chunk in r.iter_content(chunk_size=8192):
                f.write(chunk)

    print(f'下载完成: {local_path}')

    # 4. Restore: extract over the data dir (overlay, no pre-delete, so a
    # partially-populated directory is never wiped).
    os.makedirs(data_dir, exist_ok=True)
    print(f'正在解压到 {data_dir}...')
    with tarfile.open(local_path, 'r:gz') as tar:
        try:
            # Python 3.12+ (PEP 706): reject path-traversal entries.
            tar.extractall(data_dir, filter='data')
        except TypeError:
            # Older Python without the filter= parameter.
            tar.extractall(data_dir)

    print('备份恢复成功!')

    # Drop the downloaded temp archive.
    os.remove(local_path)
except Exception as e:
    # Restore is best-effort on startup; signal failure but let the
    # caller's script continue.
    print(f'恢复备份过程中出错: {str(e)}')
    sys.exit(1)
PYEOF
}

# === 6. Main flow ===

# On first start, try to restore the newest remote backup before anything
# else writes to the data directory.
download_latest_backup

# === 7. Periodic sync loop ===

# sync_data
# Endless backup loop: every SYNC_INTERVAL seconds (default 21600 = 6h),
# archive DATA_DIR with tar and push the archive via upload_backup.
# Never returns.
sync_data() {
    local file_count timestamp backup_file backup_path
    log_info "数据同步循环服务已启动"

    while true; do
        # Re-read each cycle; default 6 hours to avoid hammering the WebDAV.
        SYNC_INTERVAL=${SYNC_INTERVAL:-21600}

        if [ -d "$DATA_DIR" ]; then
            file_count=$(find "$DATA_DIR" -type f | wc -l)

            if [ "$file_count" -eq 0 ]; then
                # Nothing to back up — don't upload empty archives.
                log_info "数据目录为空 ($file_count 文件),跳过备份"
            else
                log_info "开始创建备份 (文件数: $file_count)..."

                timestamp=$(date +%Y%m%d_%H%M%S)
                backup_file="app_backup_${timestamp}.tar.gz"
                backup_path="${TEMP_DIR}/${backup_file}"

                # Archive the data dir; only upload when tar succeeded.
                if tar -czf "$backup_path" -C "$DATA_DIR" .; then
                    upload_backup "$backup_path" "$backup_file"
                    rm -f "$backup_path"
                else
                    log_error "压缩文件创建失败"
                    # Remove any partial archive so it can't accumulate.
                    rm -f "$backup_path"
                fi
            fi
        else
            # Should not happen (created at startup), but self-heal anyway.
            log_error "数据目录不存在: $DATA_DIR"
            mkdir -p "$DATA_DIR"
        fi

        log_info "下次同步将在 ${SYNC_INTERVAL} 秒后进行..."
        sleep "$SYNC_INTERVAL"
    done
}

# Start the periodic sync loop (blocks forever).
sync_data