|
|
import subprocess |
|
|
import os |
|
|
import threading |
|
|
import time |
|
|
import yaml |
|
|
from datetime import datetime |
|
|
import signal |
|
|
import psutil |
|
|
import glob |
|
|
import re |
|
|
import pytz |
|
|
|
|
|
|
|
|
# --- Backup configuration (all read from the environment, with defaults) ---

# Seconds between backup runs; kept as a string because repeat_task()
# converts it with int() at the point of use.
# BUGFIX: a hard-coded `BACKUP_TIME = 30` used to immediately clobber
# this env-derived value, making the BACKUP_TIME env var dead; removed.
BACKUP_TIME = os.environ.get('BACKUP_TIME', '1200')

# Hugging Face account/repo used as git-based backup storage for /data/ff.
HF_USER1 = os.environ.get('HF_USER1', '')
HF_REPO = os.environ.get('HF_REPO', '')
HF_EMAIL = os.environ.get('HF_EMAIL', '')
HF_TOKEN1 = os.environ.get('HF_TOKEN1', '')

# Hugging Face Space that gets factory-restarted on high resource usage.
HF_USER2 = os.environ.get('HF_USER2', '')
HF_ID = os.environ.get('HF_ID', '')
# BUGFIX: this token was read only from the misspelled env key 'HF_TOKON2',
# and later code referenced the undefined name HF_TOKON2. Prefer the
# correctly spelled 'HF_TOKEN2', but keep reading the historical
# misspelling for backward compatibility with existing deployments.
HF_TOKEN2 = os.environ.get('HF_TOKEN2') or os.environ.get('HF_TOKON2', '')
|
|
|
|
|
def cloudflared():
    # Download a pinned cloudflared build, run a tunnel with an embedded
    # token, then start a `nezha` agent thread.
    #
    # NOTE(review): `nezha` is not defined anywhere in this file; if the
    # Thread line is ever reached it raises NameError — confirm where
    # nezha() is supposed to come from.
    # SECURITY NOTE(review): the tunnel token below is hard-coded in
    # source; it should be supplied via an environment variable instead.

    # Best-effort removal of any previous binary (failure is ignored).
    os.system("rm -r /data/cf")
    # Fetch a pinned cloudflared release for linux-amd64.
    os.system("wget -O '/data/cf' -q 'https://github.com/cloudflare/cloudflared/releases/download/2025.9.0/cloudflared-linux-amd64'")
    os.system("chmod +x /data/cf")

    # NOTE(review): `cloudflared tunnel run` blocks for the lifetime of
    # the tunnel, so the Thread start below only executes after the
    # tunnel process exits — likely not the intended ordering; verify.
    os.system('/data/cf tunnel run --token eyJhIjoiZWM1MTk5ZTYwZGYxYWI2YmM2OTdhMGYzMTAzYzY4NTUiLCJ0IjoiOTY0NDIwODgtN2RkOS00Y2IwLThhMDEtYTI1YzZmMjAwZjBlIiwicyI6Ik9XSmpNVFpsTjJZdE5UWTRNQzAwTW1VMkxUaGxaREV0T1RRNU1EZzROVFV3T0dFeiJ9')
    threading.Thread(target=nezha, daemon=True).start()
|
|
def get_latest_local_package(directory, pattern='*.tar.gz'):
    """Return the path of the most recently modified file in *directory*
    that matches *pattern* (default: gzipped tarballs).

    Returns None when no file matches or when any error occurs; progress
    and errors are reported via print().
    """
    try:
        candidates = glob.glob(os.path.join(directory, pattern))
        if not candidates:
            print("未找到匹配的 nezha-hf 压缩包")
            return None
        # Most recent modification time wins.
        newest = max(candidates, key=os.path.getmtime)
        print(f"找到最新的包: {newest}")
        return newest
    except Exception as exc:
        print(f"获取最新包时发生错误: {exc}")
        return None
|
|
def compress_folder(folder_path, output_dir, keep_count=3):
    """Tar-gzip *folder_path* into *output_dir* as "<millis>.tar.gz".

    Before creating the new archive, the oldest existing *.tar.gz files
    in *output_dir* are pruned so that at most *keep_count* archives
    remain afterwards (counting the new one).

    Returns a summary string "<name> MB:<size> MB TIME:<time>" on
    success — github() parses this with split(' MB:'), so the format
    must not change — or None on any failure.
    """
    try:
        os.makedirs(output_dir, exist_ok=True)

        # China Standard Time is a fixed UTC+8 offset (no DST), so a
        # stdlib fixed-offset timezone replaces the pytz dependency the
        # original re-imported locally here.
        from datetime import datetime, timezone, timedelta
        china_tz = timezone(timedelta(hours=8))

        # Millisecond timestamp doubles as the archive's sort key.
        timestamp = str(int(datetime.now(china_tz).timestamp() * 1000))
        output_path = os.path.join(output_dir, f'{timestamp}.tar.gz')

        existing_archives = glob.glob(os.path.join(output_dir, '*.tar.gz'))

        def extract_timestamp(filename):
            # Archive names are "<millis>.tar.gz"; anything else sorts first.
            match = re.search(r'(\d+)\.tar\.gz$', filename)
            return int(match.group(1)) if match else 0

        if len(existing_archives) >= keep_count:
            existing_archives.sort(key=extract_timestamp)
            # Delete enough of the oldest archives that, after adding
            # the new one, at most keep_count remain.
            delete_count = len(existing_archives) - keep_count + 1
            for i in range(delete_count):
                oldest_archive = existing_archives[i]
                try:
                    os.remove(oldest_archive)
                    print(f"删除最旧的压缩包:{os.path.basename(oldest_archive)}")
                except Exception as e:
                    print(f"删除失败 {oldest_archive}: {e}")

        # -czf instead of -czvf: the verbose file listing was only ever
        # captured and discarded, so drop it to keep child output small.
        result = subprocess.run(
            ['tar', '-czf', output_path, folder_path],
            capture_output=True,
            text=True
        )

        if result.returncode == 0:
            file_size = os.path.getsize(output_path) / 1024 / 1024
            china_time = datetime.now(china_tz)
            formatted_time = china_time.strftime('%Y-%m-%d %H:%M:%S')
            print(f"压缩成功:{output_path}")
            print(f"压缩大小:{file_size:.2f} MB")
            print(f"压缩时间:{formatted_time}")
            print(f"保留策略:最多保留 {keep_count} 个备份包")
            return f"{os.path.basename(output_path)} MB:{file_size:.2f} MB TIME:{formatted_time}"
        else:
            print("压缩失败")
            print("错误信息:", result.stderr)
            return None

    except Exception as e:
        print(f"压缩出错: {e}")
        return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def github(type):
    # Sync /data/ff with the Hugging Face git repo used as backup storage.
    #   type == 1: restore — wipe local state, clone the repo, unpack the
    #              newest archive back into /data/ff.
    #   type == 2: backup — snapshot /data/ff, compress it into the repo
    #              working tree, commit and force-push.
    # NOTE(review): `type` shadows the builtin; left as-is so positional
    # callers (github(1)/github(2)) are untouched.
    if type == 1:
        # Start clean: remove any previous clone, restored data and staging dirs.
        os.system(f'rm -rf /data/{HF_REPO} /data/ff /data/data')
        # Always true right after the rm -rf above, but kept as a guard.
        if not os.path.exists(f'/data/{HF_REPO}'):
            # Token-in-URL clone; the full URL (including HF_TOKEN1) is
            # printed below — NOTE(review): this leaks the token into logs.
            git = f"git clone https://{HF_USER1}:{HF_TOKEN1}@huggingface.co/{HF_USER1}/{HF_REPO}"
            print(git)
            os.system(git)
        # Identity needed for the commits made in the type == 2 branch.
        os.system(f'git config --global user.email "{HF_EMAIL}"')
        os.system(f'git config --global user.name "{HF_USER1}"')
        os.system("ls")
        # Pick the newest <millis>.tar.gz inside the fresh clone.
        latest_package = get_latest_local_package(f'/data/{HF_REPO}')
        print(f"最新压缩包路径: {latest_package}")
        # NOTE(review): if no package was found, latest_package is None and
        # the tar command below degenerates to "tar -xzf None ..." — it fails
        # harmlessly via os.system, but a guard would be cleaner.
        os.system(f"tar -xzf {latest_package} -C /data")
        # Archives have been produced with two different path layouts over
        # time; try both mv variants, then discard the staging directories.
        os.system("mv /data/f/ff /data/")
        os.system("mv /data/data/f/ff /data/")
        os.system("rm -rf /data/data /data/f")

    if type == 2:
        print(f"开始备份上传HF仓库:{HF_REPO}")
        # Stage a copy of /data/ff under /data/f so the live tree isn't
        # tarred while in use.
        os.system("mkdir -p /data/f")
        os.system("cp -rf /data/ff /data/f")
        # NOTE(review): `git lfs prune` runs in the *current* working
        # directory, which is only the repo after a previous type == 2 call
        # did os.chdir below — confirm this is intentional.
        os.system('git lfs prune')
        # Returns "<name> MB:<size> MB TIME:<time>" or None on failure.
        new_archive_info = compress_folder('/data/f', f'/data/{HF_REPO}', keep_count=3)
        if new_archive_info:
            # Split off the archive name; the remainder becomes the commit message.
            new_archive, file_size_info = new_archive_info.split(' MB:')
            os.chdir(f'/data/{HF_REPO}')
            os.system("pwd")
            os.system(f'git add .')
            os.system(f'git commit -m "{file_size_info}"')
            # Force-push: history is disposable, only the latest archives matter.
            os.system('git push -f origin main')
            os.system('git gc --prune=now')
        else:
            print("压缩失败,无法提交")
|
|
|
|
|
def _reconstruct_token(partial_token): |
|
|
return partial_token.replace(" ", "") |
|
|
def restart_huggingface_space(space_name, space_id, partial_token):
    """Factory-restart the Hugging Face Space ``space_name/space_id``.

    *partial_token* may contain obfuscating spaces; they are stripped via
    _reconstruct_token() before use.

    Returns a dict with keys 'status_code' (int or None), 'success'
    (True only on HTTP 200) and 'message' (response body or error text).
    Never raises on network errors.
    """
    # BUGFIX: `requests` was used here but never imported anywhere in this
    # file, so every call raised NameError. Import locally to keep the
    # dependency scoped to the one function that needs it.
    import requests

    token = _reconstruct_token(partial_token)
    url = f"https://huggingface.co/api/spaces/{space_name}/{space_id}/restart?factory=true"
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {token}",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36"
    }
    try:
        response = requests.post(url, headers=headers, json={})
        return {
            "status_code": response.status_code,
            "success": response.status_code == 200,
            "message": response.text
        }
    except requests.RequestException as e:
        # Network-level failure: report it in the same dict shape.
        return {
            "status_code": None,
            "success": False,
            "message": str(e)
        }
|
|
def check_system_resources():
    """Sample CPU and memory usage once and factory-restart the Space
    when usage is excessive (CPU >= 90% or memory >= 95%).

    Sleeps 120s first — presumably to skip a just-started container's
    boot spike (NOTE(review): confirm this delay is intentional).
    """
    time.sleep(120)
    cpu_usage = psutil.cpu_percent(interval=1)  # 1s sampling window
    memory = psutil.virtual_memory()
    memory_usage = memory.percent

    if cpu_usage >= 90 or memory_usage >= 95:
        print("占用过高")
        # BUGFIX: referenced the undefined name HF_TOKON2 (NameError at
        # runtime); the module-level constant is HF_TOKEN2.
        result = restart_huggingface_space(HF_USER2, HF_ID, HF_TOKEN2)
        print(result)
    else:
        print("系统资源正常")
|
|
|
|
|
def repeat_task():
    """Backup worker loop: every BACKUP_TIME seconds push a backup
    (github(2)) and then re-sync from the repo (github(1)). Runs
    forever; intended to live in a daemon thread."""
    print('备份线程启动')
    while True:
        print(f"打包时间:{BACKUP_TIME} 秒")
        time.sleep(int(BACKUP_TIME))
        # Backup first, then restore-sync, in that order.
        for step in (2, 1):
            github(step)
|
|
|
|
|
# Entry point: run the backup/restart machinery only when a restored
# Firefox profile is present under /data/ff.
if os.path.exists('/data/ff/.mozilla/firefox/profiles.ini') and os.path.isfile('/data/ff/.mozilla/firefox/profiles.ini'):
    # Periodic backups happen in the background...
    threading.Thread(target=repeat_task, daemon=True).start()
    # ...while this foreground loop factory-restarts the Space every 6
    # hours, pushing a final backup just before each restart.
    while True:
        print("检测到Firefox配置,启动定期重启循环...")
        time.sleep(21600)
        github(2)
        # BUGFIX: referenced the undefined name HF_TOKON2 (NameError at
        # runtime); the module-level constant is HF_TOKEN2.
        result = restart_huggingface_space(HF_USER2, HF_ID, HF_TOKEN2)
        print(result)
|
|
|
|
|
|
|
|
|