|
|
|
|
|
""" |
|
|
HuggingFace Xet Push for Elizabeth Data |
|
|
Uses HF's built-in Xet integration for efficient data versioning |
|
|
""" |
|
|
|
|
|
import os |
|
|
import json |
|
|
import shutil |
|
|
from datetime import datetime |
|
|
from pathlib import Path |
|
|
from huggingface_hub import HfApi |
|
|
import logging |
|
|
|
|
|
|
|
|
# Module-wide logging: INFO-level root configuration plus a module logger
# used by ElizabethHFXetPush and main() below.
logging.basicConfig(level=logging.INFO)


logger = logging.getLogger(__name__)
|
|
|
|
|
class ElizabethHFXetPush:
    """Push Elizabeth data to HuggingFace using Xet integration.

    Stages SQLite databases, essential repository paths, and a JSON
    manifest under a temporary directory, then uploads the whole folder
    to the ``LevelUp2x/elizabeth-data`` dataset repo through ``HfApi``.
    """

    def __init__(self):
        # HfApi resolves credentials from HF_TOKEN or the cached CLI login.
        self.api = HfApi()
        self.dataset_id = "LevelUp2x/elizabeth-data"
        self.temp_dir = "/tmp/elizabeth_xet_upload"
        os.makedirs(self.temp_dir, exist_ok=True)

    def prepare_data(self):
        """Stage all Elizabeth data for upload.

        Copies whichever databases and repository paths exist into
        ``self.temp_dir`` and writes a ``manifest.json`` describing what
        was staged. Missing sources are skipped silently (best effort).

        Returns:
            dict: The manifest structure that was written to disk.
        """
        logger.info("Preparing Elizabeth data for HF Xet upload...")

        data_structure = {
            "version": "1.0",
            "timestamp": datetime.now().isoformat(),
            "elizabeth_version": "v0.0.2",
            "data_sources": []
        }

        # Copy the SQLite databases that exist (deduplicates the previous
        # copy-pasted per-file blocks).
        db_files = []
        for db_name in ("elizabeth_memory.db", "nova_memory.db"):
            src = os.path.join("/workspace", db_name)
            if os.path.exists(src):
                shutil.copy2(src, self.temp_dir)
                db_files.append(db_name)
                logger.info("✓ Copied %s", db_name)

        if db_files:
            data_structure["data_sources"].append({
                "type": "databases",
                "files": db_files,
                "description": "SQLite databases with conversation history"
            })

        # Mirror the essential repository layout under repository/.
        repo_dir = os.path.join(self.temp_dir, "repository")
        os.makedirs(repo_dir, exist_ok=True)

        essential_paths = [
            "/workspace/elizabeth-repo/versions",
            "/workspace/elizabeth-repo/src",
            "/workspace/elizabeth-repo/tools",
            "/workspace/elizabeth-repo/scripts",
            "/workspace/elizabeth-repo/README.md",
            "/workspace/elizabeth-repo/requirements.txt"
        ]

        for path in essential_paths:
            if not os.path.exists(path):
                continue
            if os.path.isfile(path):
                shutil.copy2(path, repo_dir)
            else:
                dest_path = os.path.join(repo_dir, os.path.basename(path))
                # dirs_exist_ok keeps repeated runs idempotent.
                shutil.copytree(path, dest_path, dirs_exist_ok=True)

        logger.info("✓ Copied repository structure")
        data_structure["data_sources"].append({
            "type": "code",
            "description": "Elizabeth repository with versions v0.0.1 and v0.0.2"
        })

        # Write the manifest alongside the staged files.
        manifest_path = os.path.join(self.temp_dir, "manifest.json")
        with open(manifest_path, 'w') as f:
            json.dump(data_structure, f, indent=2)

        logger.info("✓ Created data manifest")

        return data_structure

    def upload_to_hf(self, commit_message=None):
        """Upload the prepared staging folder to the HuggingFace dataset.

        Args:
            commit_message: Optional commit message; defaults to a
                timestamped one when falsy.

        Returns:
            dict: ``{"success": True, ...}`` with the dataset URL on
            success, or ``{"success": False, "error": ...}`` on failure.
            Never raises.
        """
        if not commit_message:
            commit_message = f"Elizabeth data update {datetime.now().strftime('%Y-%m-%d %H:%M')}"

        try:
            logger.info(f"Uploading to HuggingFace dataset: {self.dataset_id}")

            # Fail fast with actionable instructions if not authenticated.
            try:
                self.api.whoami()
            except Exception as auth_error:
                logger.error(f"Authentication failed: {auth_error}")
                logger.error("Please set HF_TOKEN environment variable:")
                logger.error("export HF_TOKEN='your_huggingface_token_here'")
                logger.error("Or login with: huggingface-cli login")
                return {
                    "success": False,
                    "error": f"Authentication required: {auth_error}",
                    "instructions": "Set HF_TOKEN environment variable or run 'huggingface-cli login'"
                }

            # exist_ok=True creates the dataset repo only when missing —
            # replaces the racy dataset_info() probe guarded by a bare
            # except:, which swallowed even KeyboardInterrupt.
            self.api.create_repo(
                self.dataset_id,
                repo_type="dataset",
                exist_ok=True
            )

            self.api.upload_folder(
                folder_path=self.temp_dir,
                repo_id=self.dataset_id,
                repo_type="dataset",
                commit_message=commit_message,
            )

            logger.info("✅ Upload completed successfully!")
            logger.info(f"Dataset URL: https://huggingface.co/datasets/{self.dataset_id}")

            return {
                "success": True,
                "dataset_url": f"https://huggingface.co/datasets/{self.dataset_id}",
                "commit_message": commit_message
            }

        except Exception as e:
            # Top-level boundary: report the failure as data (with
            # traceback in the log) instead of raising to the caller.
            logger.exception(f"Upload failed: {e}")
            return {
                "success": False,
                "error": str(e)
            }

    def cleanup(self):
        """Remove the temporary staging directory if it exists."""
        if os.path.exists(self.temp_dir):
            shutil.rmtree(self.temp_dir)
            logger.info("Cleaned up temporary files")

    def run_full_upload(self, commit_message=None):
        """Run the complete prepare -> upload -> cleanup pipeline.

        Args:
            commit_message: Optional commit message forwarded to
                ``upload_to_hf`` (new, backward-compatible — callers that
                pass nothing get the previous default behavior).

        Returns:
            dict: ``{"preparation": ..., "upload": ...}`` normally, or
            ``{"success": False, "error": ...}`` if preparation raised.
        """
        try:
            data_info = self.prepare_data()
            result = self.upload_to_hf(commit_message)
            self.cleanup()
            return {
                "preparation": data_info,
                "upload": result
            }
        except Exception as e:
            # Always drop the staged files, even on failure.
            self.cleanup()
            return {
                "success": False,
                "error": str(e)
            }
|
|
|
|
|
def main():
    """Command line interface.

    Flags:
        --upload: run the full prepare/upload/cleanup pipeline.
        --prepare-only: stage data locally and print the manifest.
        --commit-message: custom commit message for the upload.
    """
    import argparse

    parser = argparse.ArgumentParser(description="Elizabeth HF Xet Upload")
    parser.add_argument("--upload", action="store_true", help="Upload data to HuggingFace")
    parser.add_argument("--prepare-only", action="store_true", help="Only prepare data, don't upload")
    parser.add_argument("--commit-message", help="Custom commit message")

    args = parser.parse_args()

    uploader = ElizabethHFXetPush()

    if args.prepare_only:
        data_info = uploader.prepare_data()
        print("Data prepared at:", uploader.temp_dir)
        print("Manifest:")
        print(json.dumps(data_info, indent=2))

    elif args.upload:
        # Run the pipeline inline so --commit-message is actually honored
        # (it was previously parsed but never used). Passing None keeps
        # upload_to_hf's default timestamped message, so behavior without
        # the flag is unchanged; the try/finally mirrors run_full_upload's
        # prepare -> upload -> cleanup semantics.
        try:
            result = {
                "preparation": uploader.prepare_data(),
                "upload": uploader.upload_to_hf(args.commit_message)
            }
        except Exception as e:
            result = {"success": False, "error": str(e)}
        finally:
            uploader.cleanup()
        print("Upload result:")
        print(json.dumps(result, indent=2))

    else:
        # No action flag given: show usage help.
        print("Elizabeth HF Xet Upload Tool")
        print("Dataset:", uploader.dataset_id)
        print("Usage: python hf_xet_push.py --upload")
        print("Options: --prepare-only, --commit-message 'Custom message'")
|
|
|
|
|
# Script entry point: dispatch to the CLI.
if __name__ == "__main__":


    main()