|
|
import subprocess |
|
|
import os |
|
|
import threading |
|
|
import time |
|
|
import yaml |
|
|
from datetime import datetime |
|
|
import signal |
|
|
import psutil |
|
|
import glob |
|
|
import re |
|
|
import pytz |
|
|
import requests |
|
|
import shutil |
|
|
|
|
|
# --- Runtime configuration, read from the environment with fallbacks ---

# Backup schedule setting; default '1200'. Its consumer is not visible in
# this chunk — presumably a time-of-day or interval value; TODO confirm.
BACKUP_TIME = os.environ.get('BACKUP_TIME', '1200')

# Primary Hugging Face account used as the backup target repository.
HF_USER1 = os.environ.get('HF_USER1', 'jkuiii')
HF_REPO = os.environ.get('HF_REPO', 'st13')
HF_EMAIL = os.environ.get('HF_EMAIL', 'st13-bf@282820.xyz')
# Access token for HF_USER1; empty default means pushes fail unless set.
HF_TOKEN1 = os.environ.get('HF_TOKEN1', '')

# Secondary Hugging Face account — not referenced anywhere in this chunk.
HF_USER2 = os.environ.get('HF_USER2', '')
HF_ID = os.environ.get('HF_ID', '')
# NOTE(review): 'HF_TOKON2' looks like a typo for 'HF_TOKEN2' — confirm the
# deployment actually exports this exact variable name before renaming.
HF_TOKON2 = os.environ.get('HF_TOKON2', '')
|
|
|
|
|
|
|
|
|
|
|
def get_latest_local_package(directory, pattern='*.tar.gz'):
    """Return the path of the most recently modified archive in *directory*.

    Matches *pattern* (default ``*.tar.gz``) and picks the file with the
    newest mtime. Returns ``None`` when nothing matches or on any error
    (errors are logged, never raised).
    """
    try:
        candidates = glob.glob(os.path.join(directory, pattern))
        if not candidates:
            print("未找到匹配的 nezha-hf 压缩包")
            return None

        # Newest by filesystem modification time.
        newest = max(candidates, key=os.path.getmtime)
        print(f"找到最新的包: {newest}")
        return newest
    except Exception as e:
        print(f"获取最新包时发生错误: {e}")
        return None
|
|
|
|
|
|
|
|
def delete_huggingface_lfs_file(filename, repo_id, token):
    """
    Delete the LFS record for *filename* from a Hugging Face model repo via
    the Hub's lfs-files API.

    Args:
        filename: archive name as reported in the repo's LFS file listing.
        repo_id: "user/repo" identifier of the model repository.
        token: HF access token with write permission on the repo.

    Returns:
        True when the record was deleted, False on any failure (failures are
        logged, never raised).

    Fixes: log messages previously printed a literal "(unknown)" instead of
    the filename; requests now carry a timeout so a stalled connection cannot
    hang the backup cycle indefinitely.
    """
    try:
        url = f"https://huggingface.co/api/models/{repo_id}/lfs-files"
        headers = {
            "content-type": "application/json",
            "Authorization": f"Bearer {token}",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
        }

        # First list all LFS files to resolve the filename to its OID.
        response = requests.get(url, headers=headers, timeout=30)
        if response.status_code != 200:
            print(f"查询LFS文件失败: {response.status_code} - {response.text}")
            return False

        lfs_files = response.json()

        # Locate the entry matching the requested filename.
        file_to_delete = next(
            (info for info in lfs_files if info.get('filename') == filename),
            None
        )
        if not file_to_delete:
            print(f"未找到对应的LFS文件记录: {filename}")
            return False

        # Delete by OID; rewriteHistory purges the blob from repo history.
        file_oid = file_to_delete['fileOid']
        delete_url = f"https://huggingface.co/api/models/{repo_id}/lfs-files/{file_oid}?rewriteHistory=true"

        delete_response = requests.delete(delete_url, headers=headers, timeout=30)
        if delete_response.status_code == 200:
            print(f"成功删除LFS文件记录: {filename} (OID: {file_oid})")
            return True
        else:
            print(f"删除LFS文件记录失败: {delete_response.status_code} - {delete_response.text}")
            return False

    except Exception as e:
        print(f"删除LFS文件记录时出错: {e}")
        return False
|
|
|
|
|
|
|
|
def safe_git_cleanup(repo_path, files_to_remove):
    """
    Safe Git cleanup: drop *files_to_remove* from the Git index of
    *repo_path* without touching backup files still present in the working
    tree, then run `git gc` / `git lfs prune`.

    Args:
        repo_path: path of the local git repository.
        files_to_remove: filenames (relative to repo_path) to untrack.

    Returns:
        True on success, False on any error (logged, not raised).

    Fixes: the working directory is now restored in a `finally` block — the
    original could raise NameError in its except handler if `os.getcwd()`
    itself failed, and duplicated the chdir-back in two places. Log messages
    previously printed a literal "(unknown)" instead of the filename.
    """
    original_dir = os.getcwd()
    try:
        os.chdir(repo_path)
        print(f"执行安全Git清理: {files_to_remove}")

        # Commit any pending changes first so the cleanup commit stays focused.
        result = subprocess.run(['git', 'status', '--porcelain'], capture_output=True, text=True)
        if result.stdout.strip():
            print("工作目录有未提交的更改,先提交...")
            subprocess.run(['git', 'add', '.'], capture_output=True)
            subprocess.run(['git', 'commit', '-m', '自动提交: 清理前的更改'], capture_output=True)

        for filename in files_to_remove:
            if os.path.exists(filename):
                # --cached untracks the file but leaves it on disk.
                print(f"从Git索引中删除 {filename} (文件仍保留在工作目录)")
                subprocess.run(['git', 'rm', '--cached', filename], capture_output=True)
            else:
                print(f"文件 {filename} 不存在于工作目录,只清理Git引用")

        if files_to_remove:
            subprocess.run(['git', 'commit', '-m', f'清理已删除的文件: {", ".join(files_to_remove)}'], capture_output=True)

        # Reclaim space from objects no longer referenced.
        subprocess.run(['git', 'gc', '--auto'], capture_output=True)
        subprocess.run(['git', 'lfs', 'prune'], capture_output=True)

        print("安全Git清理完成")
        return True

    except Exception as e:
        print(f"安全Git清理时出错: {e}")
        return False
    finally:
        os.chdir(original_dir)
|
|
|
|
|
|
|
|
def get_remote_lfs_files(repo_id, token):
    """
    Fetch the list of all LFS files of a Hugging Face model repo.

    Args:
        repo_id: "user/repo" identifier on the Hub.
        token: HF access token.

    Returns:
        The parsed JSON list of LFS file entries, or [] on any failure
        (failures are logged, never raised).

    Fix: the request now carries a timeout so a hung connection cannot block
    the backup cycle forever; a timeout is caught below and reported as [].
    """
    try:
        url = f"https://huggingface.co/api/models/{repo_id}/lfs-files"
        headers = {
            "content-type": "application/json",
            "Authorization": f"Bearer {token}",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
        }

        response = requests.get(url, headers=headers, timeout=30)
        if response.status_code == 200:
            return response.json()
        else:
            print(f"获取远程LFS文件失败: {response.status_code}")
            return []
    except Exception as e:
        print(f"获取远程LFS文件时出错: {e}")
        return []
|
|
|
|
|
|
|
|
def cleanup_orphaned_lfs_references(repo_path, repo_id, token, keep_count=3):
    """
    Remove orphaned LFS references: remote LFS entries whose archive files no
    longer exist locally in *repo_path*.

    Args:
        repo_path: local clone of the Hugging Face repo.
        repo_id: "user/repo" identifier on the Hub.
        token: HF access token.
        keep_count: unused; kept for backward compatibility with callers.

    Returns None; all failures are logged, never raised.

    Fix: the original returned early (when the remote listing failed) without
    restoring the caller's working directory — the chdir is now always undone
    in a `finally` block, and the log message previously printed a literal
    "(unknown)" instead of the filename.
    """
    original_dir = os.getcwd()
    try:
        os.chdir(repo_path)
        print("检查孤儿LFS引用...")

        remote_files = get_remote_lfs_files(repo_id, token)
        if not remote_files:
            print("无法获取远程LFS文件列表")
            return

        # Archives currently present in the local clone (cwd == repo_path).
        local_files = set(glob.glob('*.tar.gz'))

        # Remote entries with no matching local file are orphans.
        orphaned_files = [
            info.get('filename')
            for info in remote_files
            if info.get('filename') and info.get('filename') not in local_files
        ]

        if orphaned_files:
            print(f"发现孤儿LFS引用: {orphaned_files}")
            for filename in orphaned_files:
                print(f"删除孤儿LFS引用: {filename}")
                delete_huggingface_lfs_file(filename, repo_id, token)

        print("孤儿LFS引用清理完成")

    except Exception as e:
        print(f"清理孤儿LFS引用时出错: {e}")
    finally:
        os.chdir(original_dir)
|
|
|
|
|
|
|
|
def compress_folder(folder_path, output_dir, keep_days=3):
    """
    Compress *folder_path* into a timestamped .tar.gz in *output_dir* and
    enforce the retention policy on existing archives.

    Archive naming: "<YYYYMMDD>_<epoch-ms>.tar.gz" using Asia/Shanghai time.

    Retention (applied BEFORE creating the new archive):
      * per calendar day, only the newest archive is kept — older same-day
        archives are deleted locally and their remote LFS records removed;
      * when there are already >= keep_days distinct days, the oldest days
        are dropped so keep_days-1 old days remain (plus today's new archive,
        presumably giving keep_days days total — confirm intent).

    Returns:
        "<archive-name> MB:<size> MB TIME:<timestamp>" on success (the caller
        parses this with split(' MB:')), or None on failure.

    Fixes: log messages previously printed a literal "(unknown)" instead of
    the filename; the per-archive loop variable no longer shadows the new
    archive's `timestamp` string.
    """
    try:
        os.makedirs(output_dir, exist_ok=True)

        # All dates/timestamps use China time so backups group by the
        # operator's local calendar day.
        china_tz = pytz.timezone('Asia/Shanghai')
        current_time = datetime.now(china_tz)
        current_date_str = current_time.strftime('%Y%m%d')

        # Millisecond epoch keeps same-day archives ordered and unique.
        timestamp = str(int(current_time.timestamp() * 1000))
        output_filename = f'{current_date_str}_{timestamp}.tar.gz'
        output_path = os.path.join(output_dir, output_filename)

        print(f"新备份文件名:{output_filename}")

        existing_archives = glob.glob(os.path.join(output_dir, '*.tar.gz'))

        def extract_date_and_timestamp(filename):
            # Parse "<YYYYMMDD>_<ms>.tar.gz"; legacy "<ms>.tar.gz" names are
            # attributed to today's date; anything else sorts as oldest.
            basename = os.path.basename(filename)

            match = re.search(r'(\d{8})_(\d+)\.tar\.gz$', basename)
            if match:
                return match.group(1), int(match.group(2))

            match = re.search(r'(\d+)\.tar\.gz$', basename)
            if match:
                return current_date_str, int(match.group(1))

            return "00000000", 0

        # Group existing archives by their date component.
        files_by_date = {}
        for archive in existing_archives:
            date_str, archive_ts = extract_date_and_timestamp(archive)
            files_by_date.setdefault(date_str, []).append((archive, archive_ts))

        # Per day keep only the newest archive; delete the rest locally and
        # remove their remote LFS records.
        latest_per_date = {}
        for date_str, files in files_by_date.items():
            files.sort(key=lambda x: x[1], reverse=True)
            latest_per_date[date_str] = files[0][0]

            for file_path, _ in files[1:]:
                try:
                    filename = os.path.basename(file_path)
                    print(f"删除同一天的旧备份:{filename}")
                    os.remove(file_path)
                    delete_huggingface_lfs_file(filename, f"{HF_USER1}/{HF_REPO}", HF_TOKEN1)
                except Exception as e:
                    print(f"删除失败 {file_path}: {e}")

        dates = list(latest_per_date.keys())
        dates.sort(reverse=True)  # newest date first

        # Drop the oldest dates, leaving room for today's new archive.
        files_to_cleanup = []
        if len(dates) >= keep_days:
            for old_date in dates[keep_days - 1:]:
                file_path = latest_per_date[old_date]
                filename = os.path.basename(file_path)
                try:
                    print(f"删除最旧日期的备份:{filename} (日期: {old_date})")
                    os.remove(file_path)
                    files_to_cleanup.append(filename)
                    delete_huggingface_lfs_file(filename, f"{HF_USER1}/{HF_REPO}", HF_TOKEN1)
                except Exception as e:
                    print(f"删除失败 {file_path}: {e}")

        # Untrack the deleted archives from the repo's Git index.
        if files_to_cleanup:
            print(f"执行安全Git清理: {files_to_cleanup}")
            safe_git_cleanup(output_dir, files_to_cleanup)

        # Build the archive with the system tar.
        result = subprocess.run(
            ['tar', '-czf', output_path, folder_path],
            capture_output=True,
            text=True
        )

        if result.returncode == 0:
            file_size = os.path.getsize(output_path) / 1024 / 1024  # MiB

            china_time = datetime.now(china_tz)
            formatted_time = china_time.strftime('%Y-%m-%d %H:%M:%S')

            print(f"压缩成功:{output_path}")
            print(f"压缩大小:{file_size:.2f} MB")
            print(f"压缩时间:{formatted_time}")
            print(f"保留策略:最多保留 {keep_days} 天的备份,每天只保留最新的备份")

            # Caller splits this on ' MB:' — keep the format stable.
            return f"{os.path.basename(output_path)} MB:{file_size:.2f} MB TIME:{formatted_time}"
        else:
            print("压缩失败")
            print("错误信息:", result.stderr)
            return None

    except Exception as e:
        print(f"压缩出错: {e}")
        return None
|
|
|
|
|
|
|
|
def github(type):
    """
    Restore-or-backup driver for the Hugging Face repository.

    type == 1: wipe local state, clone the HF repo, and restore the newest
               backup archive into /data.
    type == 2: snapshot /data/ST-server into /data/f, create a retention-
               pruned archive in the repo clone, and commit/push it to HF.

    NOTE(review): the parameter name shadows the builtin `type`; kept as-is
    to preserve the public signature.
    """
    if type == 1:
        # Start from a clean slate: remove the clone and restored data.
        os.system(f'rm -rf /data/{HF_REPO} /data/ST-server /data/data')
        print(f'删除已完成: /data/{HF_REPO} /data/ST-server /data/data')
        if not os.path.exists(f'/data/{HF_REPO}'):
            # NOTE(review): this prints the access token embedded in the
            # clone URL — consider suppressing it in logs.
            git = f"git clone https://{HF_USER1}:{HF_TOKEN1}@huggingface.co/{HF_USER1}/{HF_REPO}"
            print(git)
            os.system(git)
        os.system(f'git config --global user.email "{HF_EMAIL}"')
        os.system(f'git config --global user.name "{HF_USER1}"')
        os.system("ls")
        # Unpack the newest archive; both mv commands are attempted because
        # archives may contain either a /data/f or /data/data/f layout.
        latest_package = get_latest_local_package(f'/data/{HF_REPO}')
        print(f"最新压缩包路径: {latest_package}")
        if latest_package:
            os.system(f"tar -xzf {latest_package} -C /data")
            os.system("mv /data/f/ST-server /data/")
            os.system("mv /data/data/f/ST-server /data/")
            os.system("rm -rf /data/data /data/f")

    if type == 2:
        print(f"开始备份上传HF仓库:{HF_REPO}")
        os.system("mkdir -p /data/f")

        if not os.path.exists("/data/ST-server"):
            # Nothing to back up — create a placeholder so the archive is
            # never silently empty.
            print("错误:/data/ST-server 目录不存在!创建空目录作为占位符")
            os.makedirs("/data/f/ST-server", exist_ok=True)
            with open("/data/f/ST-server/README_EMPTY_BACKUP.txt", "w") as f:
                f.write("这是一个空的备份,因为备份时/data/ST-server目录不存在\n")
                f.write(f"备份时间: {datetime.now(pytz.timezone('Asia/Shanghai')).strftime('%Y-%m-%d %H:%M:%S')}\n")
        else:
            os.system("mkdir -p /data/f/ST-server")

            print("备份SQLite数据库以确保数据完整性...")
            if os.path.exists("/data/ST-server/database.sqlite"):
                try:
                    # Plain file copy of a possibly-live SQLite database; the
                    # copy may be inconsistent if the server is writing.
                    print("复制数据库文件(注意:数据库可能正在使用中,不保证完全一致性)")
                    os.makedirs("/data/f/ST-server", exist_ok=True)
                    shutil.copy2("/data/ST-server/database.sqlite", "/data/f/ST-server/database.sqlite")
                    print("数据库复制完成,但请注意其可能不完全一致")
                except Exception as e:
                    print(f"复制数据库时出错: {e}")
            else:
                print("数据库文件不存在,跳过数据库备份")

            # Copy everything except logs/, data/ and the database file
            # (the database was copied separately above).
            os.system("find /data/ST-server -maxdepth 1 -not -path '/data/ST-server/logs' -not -path '/data/ST-server/data' -not -path '/data/ST-server' -not -name 'database.sqlite' -exec cp -rf {} /data/f/ST-server/ \;")
            os.system("find /data/ST-server -maxdepth 1 -type f -not -name 'database.sqlite' -exec cp -f {} /data/f/ST-server/ \;")
            print("ST-server目录备份完成(排除logs和data目录)")

        repo_path = f'/data/{HF_REPO}'
        repo_id = f"{HF_USER1}/{HF_REPO}"

        os.makedirs(repo_path, exist_ok=True)

        try:
            os.chdir(repo_path)
        except Exception as e:
            print(f"切换到仓库目录失败: {e}")
            print(f"创建仓库目录: {repo_path}")
            os.makedirs(repo_path, exist_ok=True)
            os.chdir(repo_path)
        os.system(f'git config --global user.email "{HF_EMAIL}"')
        os.system(f'git config --global user.name "{HF_USER1}"')

        # Drop remote LFS records whose local archives were deleted, then
        # reclaim local git storage.
        cleanup_orphaned_lfs_references(repo_path, repo_id, HF_TOKEN1, keep_count=3)
        os.system('git lfs prune')
        os.system('git gc --auto')

        # Create the new archive and apply the 3-day retention policy.
        new_archive_info = compress_folder('/data/f', repo_path, keep_days=3)
        if new_archive_info:
            # compress_folder returns "<name> MB:<size> MB TIME:<ts>".
            new_archive, file_size_info = new_archive_info.split(' MB:')

            os.system("pwd")

            backup_files = glob.glob(os.path.join(repo_path, '*.tar.gz'))
            if not backup_files:
                print("仓库中没有备份文件,跳过推送步骤")
            else:
                print(f"检测到 {len(backup_files)} 个备份文件,准备推送")

                print("先拉取远程仓库的最新更改...")
                pull_result = os.system('git pull origin main')
                if pull_result != 0:
                    # Conflict: discard local history and track the remote.
                    print("拉取远程仓库有冲突,采用强制重置方式解决...")
                    os.system('git reset --hard')
                    os.system('git fetch origin main')
                    os.system('git reset --hard origin/main')

                os.system(f'git add -A')
                os.system(f'git commit -m "{file_size_info}"')

                print("尝试推送到远程仓库...")
                push_result = os.system('git push origin main')
                if push_result != 0:
                    print("标准推送失败,尝试强制推送...")
                    force_push_result = os.system('git push -f origin main')
                    if force_push_result != 0:
                        print("强制推送也失败,请手动检查仓库状态")
                    else:
                        print("强制推送成功")
                else:
                    print("推送成功")

                os.system('git gc --auto')
                os.system('git lfs prune')
        else:
            print("压缩失败,无法提交")

        # Always clear the staging folder, even if the push failed.
        print("清理临时文件夹 /data/f...")
        os.system("rm -rf /data/f")
        print("临时文件夹已清理")
|
|
|
|
|
def _reconstruct_token(partial_token): |
|
|
return partial_token.replace(" ", "") |
|
|
# On import/startup, restore the latest backup from the HF repo (mode 1).
github(1)
|
|
|