| | import subprocess |
| | import os |
| | import threading |
| | import time |
| | import yaml |
| | from datetime import datetime |
| | import signal |
| | import psutil |
| | import glob |
| | import re |
| | import pytz |
| | import requests |
| |
|
# ---- Backup configuration (all values come from the environment) ----

# Time-of-day string used by the backup scheduler (format appears to be
# HHMM, e.g. '1200' -- not consumed in this chunk; TODO confirm).
BACKUP_TIME = os.environ.get('BACKUP_TIME', '1200')
# Primary Hugging Face account used to clone/push the backup repository.
HF_USER1 = os.environ.get('HF_USER1', '')
HF_REPO = os.environ.get('HF_REPO', '')
HF_EMAIL = os.environ.get('HF_EMAIL', '')
HF_TOKEN1 = os.environ.get('HF_TOKEN1', '')

# Secondary Hugging Face account -- unused in this file; presumably consumed
# elsewhere (TODO confirm).  NOTE(review): 'HF_TOKON2' looks like a typo of
# 'HF_TOKEN2'; verify before renaming, since the deployment environment may
# already set the misspelled variable name.
HF_USER2 = os.environ.get('HF_USER2', '')
HF_ID = os.environ.get('HF_ID', '')
HF_TOKON2 = os.environ.get('HF_TOKON2', '')
| |
|
| |
|
| |
|
def get_latest_local_package(directory, pattern='*.tar.gz'):
    """Return the path of the most recently modified archive in *directory*.

    Files are matched against *pattern* (default ``*.tar.gz``) and the one
    with the newest mtime wins.  Returns ``None`` when nothing matches or
    when any error occurs (the error is printed, never raised).
    """
    try:
        matches = glob.glob(os.path.join(directory, pattern))
        if not matches:
            print("未找到匹配的 nezha-hf 压缩包")
            return None
        # Newest by filesystem modification time, not by filename.
        newest = max(matches, key=os.path.getmtime)
        print(f"找到最新的包: {newest}")
        return newest
    except Exception as exc:
        print(f"获取最新包时发生错误: {exc}")
        return None
| |
|
| |
|
def delete_huggingface_lfs_file(filename, repo_id, token):
    """Delete the LFS file record named *filename* from a Hugging Face repo.

    Fetches the repo's LFS file list from the HF API, finds the entry whose
    ``filename`` matches, and issues a DELETE with ``rewriteHistory=true``
    so the blob is purged from history as well.

    Args:
        filename: Basename of the LFS file to delete (e.g. ``12345.tar.gz``).
        repo_id:  ``user/repo`` identifier of the model repository.
        token:    Hugging Face access token with write permission.

    Returns:
        True when the record was deleted, False on any failure (errors are
        printed, never raised).
    """
    try:
        url = f"https://huggingface.co/api/models/{repo_id}/lfs-files"
        headers = {
            "content-type": "application/json",
            "Authorization": f"Bearer {token}",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
        }

        # Bounded timeout so a hung API call cannot stall the backup job.
        response = requests.get(url, headers=headers, timeout=30)
        if response.status_code != 200:
            print(f"查询LFS文件失败: {response.status_code} - {response.text}")
            return False

        lfs_files = response.json()

        # Locate the record matching the requested filename.
        file_to_delete = None
        for file_info in lfs_files:
            if file_info.get('filename') == filename:
                file_to_delete = file_info
                break

        if not file_to_delete:
            # BUG FIX: message previously printed a literal placeholder
            # instead of the actual filename.
            print(f"未找到对应的LFS文件记录: {filename}")
            return False

        # rewriteHistory=true removes the blob from repo history, freeing
        # remote LFS storage.
        file_oid = file_to_delete['fileOid']
        delete_url = f"https://huggingface.co/api/models/{repo_id}/lfs-files/{file_oid}?rewriteHistory=true"

        delete_response = requests.delete(delete_url, headers=headers, timeout=30)
        if delete_response.status_code == 200:
            # BUG FIX: filename placeholder restored here as well.
            print(f"成功删除LFS文件记录: {filename} (OID: {file_oid})")
            return True
        else:
            print(f"删除LFS文件记录失败: {delete_response.status_code} - {delete_response.text}")
            return False

    except Exception as e:
        print(f"删除LFS文件记录时出错: {e}")
        return False
| |
|
| |
|
def safe_git_cleanup(repo_path, files_to_remove):
    """Remove *files_to_remove* from the Git index of *repo_path* safely.

    'Safe' means working-tree backup files are never deleted: entries are
    only dropped from the index with ``git rm --cached``.  Any uncommitted
    changes are committed first so the cleanup commit stays focused.

    Args:
        repo_path: Path of the local Git repository.
        files_to_remove: Iterable of filenames (relative to the repo root)
            whose Git/LFS references should be cleaned up.

    Returns:
        True on success, False if any step raised.
    """
    original_dir = os.getcwd()
    try:
        os.chdir(repo_path)

        print(f"执行安全Git清理: {files_to_remove}")

        # Commit any pending changes first so the cleanup commit is isolated.
        result = subprocess.run(['git', 'status', '--porcelain'], capture_output=True, text=True)
        if result.stdout.strip():
            print("工作目录有未提交的更改,先提交...")
            subprocess.run(['git', 'add', '.'], capture_output=True)
            subprocess.run(['git', 'commit', '-m', '自动提交: 清理前的更改'], capture_output=True)

        for filename in files_to_remove:
            if os.path.exists(filename):
                # BUG FIX: messages previously printed a literal placeholder
                # instead of the filename being processed.
                print(f"从Git索引中删除 {filename} (文件仍保留在工作目录)")
                subprocess.run(['git', 'rm', '--cached', filename], capture_output=True)
            else:
                print(f"文件 {filename} 不存在于工作目录,只清理Git引用")

        if files_to_remove:
            subprocess.run(['git', 'commit', '-m', f'清理已删除的文件: {", ".join(files_to_remove)}'], capture_output=True)

        # Compact local storage; 'lfs prune' drops unreferenced LFS blobs.
        subprocess.run(['git', 'gc', '--auto'], capture_output=True)
        subprocess.run(['git', 'lfs', 'prune'], capture_output=True)

        print("安全Git清理完成")
        return True

    except Exception as e:
        print(f"安全Git清理时出错: {e}")
        return False
    finally:
        # BUG FIX: restore the caller's working directory on every path
        # instead of duplicating os.chdir() in the success and error arms.
        os.chdir(original_dir)
| |
|
| |
|
def get_remote_lfs_files(repo_id, token):
    """Fetch the list of LFS file records for *repo_id* from the HF API.

    Returns the parsed JSON list on success, or an empty list when the
    request fails or raises (errors are printed, never propagated).
    """
    api_url = f"https://huggingface.co/api/models/{repo_id}/lfs-files"
    request_headers = {
        "content-type": "application/json",
        "Authorization": f"Bearer {token}",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
    }
    try:
        resp = requests.get(api_url, headers=request_headers)
    except Exception as err:
        print(f"获取远程LFS文件时出错: {err}")
        return []
    if resp.status_code == 200:
        return resp.json()
    print(f"获取远程LFS文件失败: {resp.status_code}")
    return []
| |
|
| |
|
def cleanup_orphaned_lfs_references(repo_path, repo_id, token, keep_count=3):
    """Delete remote LFS records that have no matching local archive.

    Compares the repo's remote LFS file list against the ``*.tar.gz`` files
    present in *repo_path* and deletes every remote record whose file no
    longer exists locally.

    Args:
        repo_path: Local clone of the backup repository.
        repo_id:   ``user/repo`` identifier on Hugging Face.
        token:     HF access token with write permission.
        keep_count: Unused here; kept for backward compatibility with callers.

    Returns None; all errors are printed rather than raised.
    """
    original_dir = os.getcwd()
    try:
        os.chdir(repo_path)

        print("检查孤儿LFS引用...")

        remote_files = get_remote_lfs_files(repo_id, token)
        if not remote_files:
            # BUG FIX: this early exit previously returned WITHOUT restoring
            # the caller's working directory, leaving the process chdir'd
            # into repo_path.  The finally block below now handles it.
            print("无法获取远程LFS文件列表")
            return

        # Local backup archives present in the working tree.
        local_files = set(glob.glob('*.tar.gz'))

        # Remote records whose file no longer exists locally.
        orphaned_files = []
        for remote_file in remote_files:
            filename = remote_file.get('filename')
            if filename and filename not in local_files:
                orphaned_files.append(filename)

        if orphaned_files:
            print(f"发现孤儿LFS引用: {orphaned_files}")
            for filename in orphaned_files:
                # BUG FIX: message previously printed a literal placeholder
                # instead of the filename being deleted.
                print(f"删除孤儿LFS引用: {filename}")
                delete_huggingface_lfs_file(filename, repo_id, token)

        print("孤儿LFS引用清理完成")

    except Exception as e:
        print(f"清理孤儿LFS引用时出错: {e}")
    finally:
        os.chdir(original_dir)
| |
|
| |
|
def compress_folder(folder_path, output_dir, keep_count=3):
    """Create a timestamped ``.tar.gz`` of *folder_path* inside *output_dir*.

    Before compressing, a retention policy is enforced: if creating the new
    archive would exceed *keep_count* archives in *output_dir*, the oldest
    ones are deleted locally, their Hugging Face LFS records are removed,
    and the Git index is cleaned via safe_git_cleanup().

    Returns a summary string ``"<name>.tar.gz MB:<size> MB TIME:<time>"``
    on success (the caller splits this on ``' MB:'``), or None on failure.
    """
    try:
        os.makedirs(output_dir, exist_ok=True)

        # All timestamps use China time (Asia/Shanghai).
        china_tz = pytz.timezone('Asia/Shanghai')

        # Archive name is the current epoch time in milliseconds.
        timestamp = str(int(datetime.now(china_tz).timestamp() * 1000))
        output_path = os.path.join(output_dir, f'{timestamp}.tar.gz')

        existing_archives = glob.glob(os.path.join(output_dir, '*.tar.gz'))

        # Archives sort by the numeric millisecond timestamp in their name;
        # a non-matching name sorts first (0) and is deleted earliest.
        def extract_timestamp(filename):
            match = re.search(r'(\d+)\.tar\.gz$', filename)
            return int(match.group(1)) if match else 0

        files_to_cleanup = []

        # Delete enough of the oldest archives that, after adding the new
        # one, at most keep_count archives remain.
        if len(existing_archives) >= keep_count:
            existing_archives.sort(key=extract_timestamp)

            # '+ 1' reserves a slot for the archive about to be created.
            delete_count = len(existing_archives) - keep_count + 1

            for i in range(delete_count):
                oldest_archive = existing_archives[i]
                oldest_filename = os.path.basename(oldest_archive)

                try:
                    os.remove(oldest_archive)
                    print(f"删除最旧的压缩包:{oldest_filename}")

                    # Remember the name for the Git index cleanup below.
                    files_to_cleanup.append(oldest_filename)

                    # Also drop the remote LFS record so repo storage shrinks.
                    print(f"正在删除Hugging Face LFS文件记录: {oldest_filename}")
                    delete_huggingface_lfs_file(oldest_filename, f"{HF_USER1}/{HF_REPO}", HF_TOKEN1)

                except Exception as e:
                    print(f"删除失败 {oldest_archive}: {e}")

        if files_to_cleanup:
            print(f"执行安全Git清理: {files_to_cleanup}")
            safe_git_cleanup(output_dir, files_to_cleanup)

        # NOTE(review): tar receives folder_path verbatim, so an absolute
        # path is archived with its leading '/' stripped by tar's default
        # behavior -- confirm the restore side (github(1)) expects that
        # resulting 'data/f/...' layout.
        result = subprocess.run(
            ['tar', '-czf', output_path, folder_path],
            capture_output=True,
            text=True
        )

        if result.returncode == 0:
            file_size = os.path.getsize(output_path) / 1024 / 1024  # bytes -> MB

            china_time = datetime.now(china_tz)
            formatted_time = china_time.strftime('%Y-%m-%d %H:%M:%S')

            print(f"压缩成功:{output_path}")
            print(f"压缩大小:{file_size:.2f} MB")
            print(f"压缩时间:{formatted_time}")
            print(f"保留策略:最多保留 {keep_count} 个备份包")

            # Caller parses this format -- keep it stable.
            return f"{os.path.basename(output_path)} MB:{file_size:.2f} MB TIME:{formatted_time}"
        else:
            print("压缩失败")
            print("错误信息:", result.stderr)
            return None

    except Exception as e:
        print(f"压缩出错: {e}")
        return None
| |
|
| |
|
def github(type):
    """Restore from, or back up to, the Hugging Face backup repository.

    type == 1: restore -- wipe local state, clone the HF repo, extract the
               newest archive into /data and move its contents into place.
    type == 2: backup -- snapshot /data/ff, prune old archives/LFS records,
               create a new archive, then commit and push it.

    NOTE(review): the parameter shadows the builtin ``type``; renaming would
    be cleaner but is kept for call-site compatibility.
    """
    if type == 1:
        # Start clean: remove any previous clone and extracted data.
        os.system(f'rm -rf /data/{HF_REPO} /data/ff /data/data')
        if not os.path.exists(f'/data/{HF_REPO}'):
            # Token embedded in the clone URL for authentication.
            git = f"git clone https://{HF_USER1}:{HF_TOKEN1}@huggingface.co/{HF_USER1}/{HF_REPO}"
            print(git)
            os.system(git)
        os.system(f'git config --global user.email "{HF_EMAIL}"')
        os.system(f'git config --global user.name "{HF_USER1}"')
        os.system("ls")
        latest_package = get_latest_local_package(f'/data/{HF_REPO}')
        print(f"最新压缩包路径: {latest_package}")
        if latest_package:
            os.system(f"tar -xzf {latest_package} -C /data")
            # Archives may extract to either /data/f/ff or /data/data/f/ff
            # depending on how they were created; presumably one of these
            # 'mv' calls succeeds and the other no-ops -- TODO confirm.
            os.system("mv /data/f/ff /data/")
            os.system("mv /data/data/f/ff /data/")
            os.system("rm -rf /data/data /data/f")

    if type == 2:
        print(f"开始备份上传HF仓库:{HF_REPO}")
        # Snapshot /data/ff into /data/f so the archive has a stable layout.
        os.system("mkdir -p /data/f")
        os.system("cp -rf /data/ff /data/f")

        repo_path = f'/data/{HF_REPO}'
        repo_id = f"{HF_USER1}/{HF_REPO}"

        # NOTE(review): this chdir is never undone -- the process remains
        # inside repo_path after this branch finishes.
        os.chdir(repo_path)
        os.system(f'git config --global user.email "{HF_EMAIL}"')
        os.system(f'git config --global user.name "{HF_USER1}"')

        # Drop remote LFS records whose archives no longer exist locally.
        cleanup_orphaned_lfs_references(repo_path, repo_id, HF_TOKEN1, keep_count=3)

        # Reclaim local repo space before adding a new archive.
        os.system('git lfs prune')
        os.system('git gc --auto')

        new_archive_info = compress_folder('/data/f', repo_path, keep_count=3)
        if new_archive_info:
            # compress_folder returns "<name> MB:<size> MB TIME:<time>".
            new_archive, file_size_info = new_archive_info.split(' MB:')

            os.system("pwd")
            os.system(f'git add .')
            os.system(f'git commit -m "{file_size_info}"')

            # Push; on failure assume divergence, rebase onto remote, retry.
            push_result = os.system('git push origin main')
            if push_result != 0:
                print("推送失败,可能有冲突,尝试拉取并合并...")
                os.system('git pull origin main --rebase')
                os.system('git push origin main')

            os.system('git gc --auto')
            os.system('git lfs prune')
        else:
            print("压缩失败,无法提交")
| |
|
| | def _reconstruct_token(partial_token): |
| | return partial_token.replace(" ", "") |
# Module-level entry point: run a backup (type 2) immediately when this file
# executes.  NOTE(review): consider an `if __name__ == "__main__":` guard so
# importing the module does not trigger a backup -- confirm first that no
# deployment relies on the import-time side effect.
github(2)
| |
|