File size: 3,238 Bytes
461a060
 
 
 
 
 
 
 
 
 
 
 
caaafa8
0481745
 
 
 
 
 
 
 
caaafa8
 
 
 
0481745
 
 
 
 
461a060
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0481745
 
461a060
 
 
 
 
 
0481745
461a060
 
 
 
 
 
 
 
 
 
0481745
 
461a060
0481745
461a060
 
 
 
 
 
 
 
 
 
 
 
0481745
461a060
 
 
 
0481745
461a060
 
 
 
 
 
 
0481745
461a060
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
#!/bin/sh

# Backup support requires both HF_TOKEN and DATASET_ID; without them,
# start the app directly with no backup functionality.
if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
    echo "Starting without backup functionality - missing HF_TOKEN or DATASET_ID"
    exec node ./src/app/app.js
    # exec only returns if node could not be started; report failure
    # instead of masking it with a zero exit status.
    exit 1
fi

# Activate the Python virtual environment (provides huggingface_hub).
. /opt/venv/bin/activate

# Prune old backup archives on the HuggingFace dataset, keeping the
# newest $DATASET_N (default 5). Credentials and IDs are read from the
# environment inside Python — the embedded code is single-quoted so
# special characters in HF_TOKEN/DATASET_ID cannot break or inject into
# the Python source.
manage_backups() {
    python3 -c '
import os
from huggingface_hub import HfApi

api = HfApi(token=os.environ["HF_TOKEN"])
dataset_id = os.environ["DATASET_ID"]
try:
    files = api.list_repo_files(repo_id=dataset_id, repo_type="dataset")
    backups = sorted(f for f in files
                     if f.startswith("electerm_backup_") and f.endswith(".tar.gz"))
    # Unset/non-numeric DATASET_N falls back to keeping 5 backups,
    # matching the original shell-interpolated behavior.
    keep = os.environ.get("DATASET_N", "")
    keep = int(keep) if keep.isdigit() else 5
    if len(backups) > keep:
        for name in backups[:-keep]:
            api.delete_file(path_in_repo=name, repo_id=dataset_id, repo_type="dataset")
            print(f"Deleted old backup: {name}")
except Exception as e:
    print(f"Error managing backups: {str(e)}")
'
}

# Upload one backup archive to the HuggingFace dataset, then prune old
# backups.
#   $1 - local path of the archive
#   $2 - name to store it under in the repo
# Path and name are passed as argv so quoting in them cannot break the
# embedded Python; credentials come from the environment.
upload_backup() {
    file_path="$1"
    file_name="$2"

    python3 -c '
import os, sys
from huggingface_hub import HfApi

api = HfApi(token=os.environ["HF_TOKEN"])
src, dest = sys.argv[1], sys.argv[2]
try:
    api.upload_file(
        path_or_fileobj=src,
        path_in_repo=dest,
        repo_id=os.environ["DATASET_ID"],
        repo_type="dataset",
    )
    print(f"Successfully uploaded {dest}")
except Exception as e:
    print(f"Error uploading file: {str(e)}")
' "$file_path" "$file_name"

    manage_backups
}

# Download the newest backup archive from the HuggingFace dataset and
# extract it into /app/electerm-web/data. Prints a notice and returns
# quietly when no backup exists. Credentials are read from the
# environment inside single-quoted Python to avoid shell interpolation
# into the Python source.
download_latest_backup() {
    python3 -c '
import os, sys, tarfile, tempfile
from huggingface_hub import HfApi

api = HfApi(token=os.environ["HF_TOKEN"])
dataset_id = os.environ["DATASET_ID"]
try:
    files = api.list_repo_files(repo_id=dataset_id, repo_type="dataset")
    backups = [f for f in files
               if f.startswith("electerm_backup_") and f.endswith(".tar.gz")]
    if not backups:
        print("No backup files found")
        sys.exit()
    # Timestamped names sort lexicographically == chronologically.
    latest = sorted(backups)[-1]
    with tempfile.TemporaryDirectory() as tmp:
        path = api.hf_hub_download(
            repo_id=dataset_id, filename=latest,
            repo_type="dataset", local_dir=tmp,
        )
        if os.path.exists(path):
            # NOTE(review): extractall trusts member paths from the
            # archive; on Python >= 3.12 consider filter="data" to
            # block path traversal — confirm runtime Python version.
            with tarfile.open(path, "r:gz") as tar:
                tar.extractall("/app/electerm-web/data")
            print(f"Successfully restored backup from {latest}")
except Exception as e:
    print(f"Error downloading backup: {str(e)}")
'
}

# On first start, restore the latest backup before launching the app.
echo "Downloading latest backup from HuggingFace..."
download_latest_backup

# Periodically archive /app/electerm-web/data and upload it to the
# HuggingFace dataset, sleeping $SYNC_INTERVAL seconds (default 7200)
# between runs. Runs forever; intended to be launched in the background.
sync_data() {
    while true; do
        echo "Starting sync process at $(date)"
        if [ -d /app/electerm-web/data ]; then
            backup_file="electerm_backup_$(date +%Y%m%d_%H%M%S).tar.gz"
            # Only upload when the archive was created successfully, so
            # a partial tarball is never pushed as a "good" backup.
            if tar -czf "/tmp/${backup_file}" -C /app/electerm-web/data .; then
                echo "Uploading backup to HuggingFace..."
                upload_backup "/tmp/${backup_file}" "${backup_file}"
            else
                echo "Failed to create backup archive, skipping upload" >&2
            fi
            rm -f "/tmp/${backup_file}"
        else
            echo "Data directory does not exist yet, waiting for next sync..."
        fi
        sleep "${SYNC_INTERVAL:-7200}"
    done
}

# Start the periodic sync loop in the background.
sync_data &

# Replace this shell with the Electerm app process (PID 1 friendly).
exec node ./src/app/app.js