#!/bin/bash
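# Sync Cloudreve data with a Hugging Face dataset: restore the latest backup
# on startup, then periodically upload tarball snapshots in the background.
#
# Usage (hypothetical values):
#   export HF_TOKEN=hf_xxx
#   export DATASET_ID=username/cloudreve-backup
#   ./sync_data.sh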
# Check that the Hugging Face token and dataset ID environment variables are set
if [[ -z "$HF_TOKEN" ]] || [[ -z "$DATASET_ID" ]]; then
    echo "Starting Cloudreve without backup functionality - missing HF_TOKEN or DATASET_ID"
    exec /opt/cloudreve/cloudreve -c /opt/cloudreve/config.ini
    exit 0
fi
# Cloudreve data directory and config file paths
DATA_DIR="/opt/cloudreve/data"
CONFIG_FILE="/opt/cloudreve/config.ini"
BACKUP_PREFIX="cloudreve_backup"
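# Backup archives are named ${BACKUP_PREFIX}_YYYYMMDD_HHMMSS.tar.gz, so sorting
# the filenames lexicographically also sorts them chronologically.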
# Activate the Python virtual environment if present
if [ -f "/opt/venv/bin/activate" ]; then
    source /opt/venv/bin/activate
fi
# Upload a backup archive to the dataset, then prune old copies
upload_backup() {
    local file_path="$1"
    local file_name="$2"
    local token="$HF_TOKEN"
    local repo_id="$DATASET_ID"
    # Shell variables are interpolated into the Python source below before it
    # runs, so their values must not contain single quotes.
    python3 -c "
from huggingface_hub import HfApi

def manage_backups(api, repo_id, max_files=5):
    # Keep only the newest max_files backups in the dataset
    files = api.list_repo_files(repo_id=repo_id, repo_type='dataset')
    backup_files = [f for f in files if f.startswith('$BACKUP_PREFIX') and f.endswith('.tar.gz')]
    backup_files.sort()
    if len(backup_files) >= max_files:
        files_to_delete = backup_files[:(len(backup_files) - max_files + 1)]
        for file_to_delete in files_to_delete:
            try:
                api.delete_file(path_in_repo=file_to_delete, repo_id=repo_id, repo_type='dataset')
                print(f'Deleted old backup: {file_to_delete}')
            except Exception as e:
                print(f'Error deleting {file_to_delete}: {str(e)}')

api = HfApi(token='$token')
try:
    api.upload_file(
        path_or_fileobj='$file_path',
        path_in_repo='$file_name',
        repo_id='$repo_id',
        repo_type='dataset'
    )
    print('Successfully uploaded $file_name')
    manage_backups(api, '$repo_id')
except Exception as e:
    print(f'Error uploading file: {str(e)}')
"
}
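# Example invocation (hypothetical archive name):
#   upload_backup "/tmp/${BACKUP_PREFIX}_20240101_000000.tar.gz" "${BACKUP_PREFIX}_20240101_000000.tar.gz"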
# Download the most recent backup from the dataset and restore it
download_latest_backup() {
    local token="$HF_TOKEN"
    local repo_id="$DATASET_ID"
    python3 -c "
from huggingface_hub import HfApi
import sys
import os
import shutil
import tarfile
import tempfile

api = HfApi(token='$token')
try:
    files = api.list_repo_files(repo_id='$repo_id', repo_type='dataset')
    backup_files = [f for f in files if f.startswith('$BACKUP_PREFIX') and f.endswith('.tar.gz')]
    if not backup_files:
        print('No backup files found.')
        sys.exit()
    latest_backup = sorted(backup_files)[-1]
    with tempfile.TemporaryDirectory() as temp_dir:
        filepath = api.hf_hub_download(
            repo_id='$repo_id',
            filename=latest_backup,
            repo_type='dataset',
            local_dir=temp_dir
        )
        if filepath and os.path.exists(filepath):
            # Remove the existing data directory and config file before restoring
            if os.path.isdir('$DATA_DIR'):
                print('Deleting existing data directory: $DATA_DIR')
                shutil.rmtree('$DATA_DIR')
            if os.path.isfile('$CONFIG_FILE'):
                print('Deleting existing config file: $CONFIG_FILE')
                os.remove('$CONFIG_FILE')
            os.makedirs('$DATA_DIR', exist_ok=True)
            # Extract the backup into /opt/cloudreve
            with tarfile.open(filepath, 'r:gz') as tar:
                tar.extractall('/opt/cloudreve')
            print(f'Successfully restored backup from {latest_backup}')
except Exception as e:
    print(f'Error downloading backup: {str(e)}')
"
}
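# Note: the archive is expected to contain data/ and config.ini relative to
# /opt/cloudreve, matching what sync_data (below) produces.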
# Restore the latest backup before starting
echo "Downloading latest backup from HuggingFace..."
download_latest_backup
# Periodic backup loop, run in the background
sync_data() {
    while true; do
        echo "Starting sync process at $(date)"
        if [ -d "$DATA_DIR" ]; then
            timestamp=$(date +%Y%m%d_%H%M%S)
            backup_file="${BACKUP_PREFIX}_${timestamp}.tar.gz"
            backup_path="/tmp/${backup_file}"
            echo "Compressing data directory and config file..."
            # Archive the data directory and config file; paths are relative to
            # /opt/cloudreve so the restore in download_latest_backup lines up
            tar -czf "$backup_path" -C /opt/cloudreve data config.ini
echo "Uploading backup to HuggingFace..."
upload_backup "$backup_path" "${backup_file}"
rm -f "$backup_path"
else
echo "Data directory does not exist yet, waiting for next sync..."
fi
SYNC_INTERVAL=${SYNC_INTERVAL:-3600} # 默认同步间隔为 1 小时
echo "Next sync in ${SYNC_INTERVAL} seconds..."
sleep $SYNC_INTERVAL
done
}
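# With the default hourly interval and max_files=5 in manage_backups, roughly
# the five most recent snapshots (about five hours) are kept in the dataset.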
sync_data &
# Start Cloudreve in the foreground
echo "Starting Cloudreve..."
exec /opt/cloudreve/cloudreve -c /opt/cloudreve/config.ini