flzta committed on
Commit
a5a6d82
·
verified ·
1 Parent(s): d363dec

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +34 -116
Dockerfile CHANGED
@@ -1,131 +1,49 @@
1
#!/bin/sh
# Entry point: restore the latest backup from a HuggingFace dataset, run a
# periodic backup loop in the background, then start Cloudreve.
#
# Required environment for backup functionality:
#   HF_TOKEN   - HuggingFace API token with write access to the dataset
#   DATASET_ID - target dataset repo id (e.g. "user/cloudreve-backups")

# Without credentials, skip backup/restore entirely and just run the app.
# NOTE: the original script had an unreachable `exit 0` after exec, which
# would have reported success if exec ever failed; it has been removed so a
# failed exec propagates its real (non-zero) exit status.
if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
    echo "Starting without backup functionality - missing HF_TOKEN or DATASET_ID"
    exec ./cloudreve
fi

# Activate the Python virtual environment (provides huggingface_hub).
. /opt/venv/bin/activate
12
 
13
# Upload a backup archive to the HuggingFace dataset repo, then prune old
# backups so at most 50 remain.
#   $1 - local path of the archive to upload
#   $2 - name to give the file inside the dataset repo
upload_backup() {
    file_path="$1"
    file_name="$2"

    # Pass values through the environment instead of interpolating them into
    # the Python source: a double-quoted heredoc breaks on quotes/backslashes
    # in the values and is a code-injection risk. The quoted delimiter
    # ('PYEOF') keeps the Python text literal.
    HF_TOKEN="$HF_TOKEN" DATASET_ID="$DATASET_ID" \
    BACKUP_FILE_PATH="$file_path" BACKUP_FILE_NAME="$file_name" \
    python3 - <<'PYEOF'
import os
from huggingface_hub import HfApi

def manage_backups(api, repo_id, max_files=50):
    """Delete the oldest backups so that at most max_files remain."""
    files = api.list_repo_files(repo_id=repo_id, repo_type='dataset')
    backup_files = [f for f in files
                    if f.startswith('cloudreve_backup_') and f.endswith('.tar.gz')]
    backup_files.sort()  # names embed a sortable timestamp, so oldest sort first

    if len(backup_files) >= max_files:
        for file_to_delete in backup_files[:(len(backup_files) - max_files + 1)]:
            try:
                api.delete_file(path_in_repo=file_to_delete,
                                repo_id=repo_id, repo_type='dataset')
                print(f'Deleted old backup: {file_to_delete}')
            except Exception as e:
                print(f'Error deleting {file_to_delete}: {e}')

api = HfApi(token=os.environ['HF_TOKEN'])
dataset_id = os.environ['DATASET_ID']
file_path = os.environ['BACKUP_FILE_PATH']
file_name = os.environ['BACKUP_FILE_NAME']
try:
    api.upload_file(
        path_or_fileobj=file_path,
        path_in_repo=file_name,
        repo_id=dataset_id,
        repo_type='dataset'
    )
    print(f'Successfully uploaded {file_name}')

    manage_backups(api, dataset_id)
except Exception as e:
    print(f'Error uploading file: {e}')
PYEOF
}
50
 
51
# Download the newest backup archive from the HuggingFace dataset repo and
# restore it into /opt/cloudreve/data, replacing any existing data.
#
# BUG FIX: the original embedded "Python" contained literal shell syntax
# (`if [ -d … ]; fi`, `mkdir -p`, `rm -rf`, `echo f'…'`), which is a Python
# SyntaxError — the restore branch could never have executed. Rewritten in
# valid Python using shutil/os, with credentials passed via the environment
# rather than interpolated into the source.
download_latest_backup() {
    HF_TOKEN="$HF_TOKEN" DATASET_ID="$DATASET_ID" python3 - <<'PYEOF'
import os
import shutil
import tarfile
import tempfile
from huggingface_hub import HfApi

DATA_DIR = '/opt/cloudreve/data'

api = HfApi(token=os.environ['HF_TOKEN'])
dataset_id = os.environ['DATASET_ID']
try:
    files = api.list_repo_files(repo_id=dataset_id, repo_type='dataset')
    backup_files = [f for f in files
                    if f.startswith('cloudreve_backup_') and f.endswith('.tar.gz')]

    if not backup_files:
        # Not an error: first boot has nothing to restore, keep starting up.
        print('No backup files found on HuggingFace Dataset')
    else:
        # Timestamped names sort lexicographically, so the last one is newest.
        latest_backup = sorted(backup_files)[-1]

        with tempfile.TemporaryDirectory() as temp_dir:
            filepath = api.hf_hub_download(
                repo_id=dataset_id,
                filename=latest_backup,
                repo_type='dataset',
                local_dir=temp_dir
            )

            if filepath and os.path.exists(filepath):
                print(f'Found latest backup: {latest_backup}, attempting to restore...')
                # Replace the existing data directory wholesale.
                if os.path.isdir(DATA_DIR):
                    print(f'Deleting existing data directory: {DATA_DIR}')
                    shutil.rmtree(DATA_DIR)
                os.makedirs(DATA_DIR, exist_ok=True)

                with tarfile.open(filepath, 'r:gz') as tar:
                    tar.extractall(DATA_DIR)
                print(f'Successfully restored backup from {latest_backup}')
except Exception as e:
    print(f'Error downloading backup from HuggingFace Dataset: {e}')
PYEOF
}
 
96
 
97
# On every start (first boot included), try to restore the newest backup
# before launching the service.
echo "Attempting to download and restore latest backup from HuggingFace Dataset..."
download_latest_backup
100
 
101
# Periodic backup loop: every SYNC_INTERVAL seconds (default 7200), archive
# /opt/cloudreve/data and upload it to the HuggingFace dataset.
sync_data() {
    while true; do
        echo "Starting sync process at $(date)"

        if [ -d /opt/cloudreve/data ]; then
            timestamp=$(date +%Y%m%d_%H%M%S)
            backup_file="cloudreve_backup_${timestamp}.tar.gz"

            # Archive the data directory; only upload when the archive was
            # created successfully (the original uploaded unconditionally,
            # which could push a truncated/broken archive).
            if tar -czf "/tmp/${backup_file}" -C /opt/cloudreve/data .; then
                echo "Uploading backup to HuggingFace Dataset..."
                upload_backup "/tmp/${backup_file}" "${backup_file}"
            else
                echo "Failed to create backup archive, skipping upload" >&2
            fi

            rm -f "/tmp/${backup_file}"
        else
            echo "Data directory does not exist yet, waiting for next sync..."
        fi

        SYNC_INTERVAL=${SYNC_INTERVAL:-7200}
        echo "Next sync in ${SYNC_INTERVAL} seconds..."
        # Quoted to avoid word-splitting if the variable is ever set oddly.
        sleep "$SYNC_INTERVAL"
    done
}
126
-
127
# Run the backup loop in the background ...
sync_data &

# ... then replace this shell with the Cloudreve server process so it
# receives signals directly.
exec ./cloudreve
 
1
FROM alpine:latest

WORKDIR /opt/cloudreve

ENV TZ=Asia/Shanghai

# Runtime and build dependencies in a single layer.
RUN apk update && apk add --no-cache wget tar gzip python3 py3-pip curl bash git make g++ build-base

# Python virtual environment with huggingface_hub for the backup script.
ENV VIRTUAL_ENV=/opt/venv
RUN python3 -m venv $VIRTUAL_ENV
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
RUN pip install --no-cache-dir huggingface_hub

# Download and unpack the Cloudreve 3.8.3 prebuilt binary. Removing the
# archive in the same RUN keeps it out of the image layers (the original
# left the ~20 MB tarball baked into the image).
RUN wget https://github.com/cloudreve/Cloudreve/releases/download/3.8.3/cloudreve_3.8.3_linux_amd64.tar.gz \
    && tar -zxvf cloudreve_3.8.3_linux_amd64.tar.gz \
    && rm -f cloudreve_3.8.3_linux_amd64.tar.gz

# Data directory and base permissions.
RUN mkdir -p /opt/cloudreve/data && chmod -R 755 /opt/cloudreve

# Unprivileged user added to the pre-existing www-data group (Alpine ships
# the group); ownership change in the same layer avoids a duplicate -R pass.
RUN adduser -u 1000 -D -S -G www-data www-data \
    && chown -R www-data:www-data /opt/cloudreve

# Startup/backup script.
COPY sync_data.sh /
RUN chmod +x /sync_data.sh && chown www-data:www-data /sync_data.sh

USER www-data
WORKDIR /opt/cloudreve

ENV NODE_ENV=production
ENV HOST=0.0.0.0
# ENV ENABLE_AUTH=1 # enable if authentication is required
# ENV SERVER_SECRET=your-secret-key # set your secret key
# ENV SERVER_PASS=your-password # set your password

EXPOSE 5212

CMD ["/bin/sh", "-c", "/sync_data.sh"]