# Provenance (from HF Hub page): uploaded by ADAPT-Chase via the
# upload-large-folder tool, commit fd357f4 (verified), 4.98 kB.
#!/usr/bin/env python3
"""
Upload verification and safe deletion script
Verifies successful uploads before deleting local copies
"""
import os
import logging
from huggingface_hub import HfApi
# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
def verify_uploads(repo_id="LevelUp2x/dto-models",
                   experiments_path="/data/experiments",
                   max_size_gb=10):
    """Cross-check local model files against the HF Hub repository.

    Walks *experiments_path* for model weight files (``.safetensors``,
    ``.pt``, ``.bin``) no larger than *max_size_gb* and checks whether
    each one is present in *repo_id* on the Hub.

    Args:
        repo_id: Hugging Face repository to verify against.
        experiments_path: Local root directory that was uploaded.
        max_size_gb: Files larger than this are skipped (mirrors the
            upload-side size cutoff).

    Returns:
        Tuple ``(successfully_uploaded, not_uploaded)`` of local file
        path lists.  Both are empty if verification fails.

    Raises:
        ValueError: if the HF_TOKEN environment variable is not set.
    """
    token = os.getenv('HF_TOKEN')
    if not token:
        raise ValueError("HF_TOKEN environment variable not set")
    api = HfApi(token=token)
    try:
        repo_info = api.repo_info(repo_id)
        # Set for O(1) membership tests in the loop below.
        uploaded_files = {file.rfilename for file in repo_info.siblings}
        logger.info(f"Found {len(uploaded_files)} files in {repo_id}")

        max_size_bytes = max_size_gb * 1024 ** 3
        local_files = []
        if os.path.exists(experiments_path):
            for root, _, files in os.walk(experiments_path):
                for file in files:
                    if file.endswith(('.safetensors', '.pt', '.bin')):
                        file_path = os.path.join(root, file)
                        try:
                            # Skip files over the size cutoff — they were
                            # never uploaded in the first place.
                            if os.path.getsize(file_path) > max_size_bytes:
                                continue
                            local_files.append(file_path)
                        except OSError:
                            continue  # vanished or unreadable; skip it
        logger.info(f"Found {len(local_files)} local files eligible for upload")

        successfully_uploaded = []
        not_uploaded = []
        for local_file in local_files:
            # Repo path = path relative to the uploaded root, with forward
            # slashes.  (The original hard-coded '/data/experiments/' here,
            # which silently broke if experiments_path changed.)
            repo_path = os.path.relpath(local_file, experiments_path).replace(os.sep, '/')
            if repo_path in uploaded_files:
                successfully_uploaded.append(local_file)
            else:
                not_uploaded.append(local_file)
        logger.info(f"✅ {len(successfully_uploaded)} files successfully uploaded")
        logger.info(f"❌ {len(not_uploaded)} files not found in repository")
        return successfully_uploaded, not_uploaded
    except Exception as e:
        # Best-effort: a verification failure must never look like a green
        # light for deletion, so report nothing as uploaded.
        logger.error(f"Failed to verify uploads: {e}")
        return [], []
def safe_delete_files(files_to_delete):
    """Delete the given local files, logging each removal.

    Only call this with paths that ``verify_uploads()`` has confirmed
    exist in the Hub repository.

    Args:
        files_to_delete: iterable of file paths to remove.

    Returns:
        Tuple ``(deleted_count, failed_count)`` of ints.
    """
    log = logging.getLogger(__name__)
    if not files_to_delete:
        log.info("No files to delete")
        return 0, 0
    log.warning(f"⚠️ About to delete {len(files_to_delete)} files")
    deleted_count = 0
    failed_count = 0
    reclaimed_gb = 0.0  # running total so we can report real space savings
    for file_path in files_to_delete:
        try:
            file_size = os.path.getsize(file_path) / (1024 ** 3)  # size in GB
            os.remove(file_path)
            log.info(f"🗑️ Deleted {file_path} ({file_size:.1f}GB)")
            deleted_count += 1
            reclaimed_gb += file_size
        except Exception as e:
            log.error(f"Failed to delete {file_path}: {e}")
            failed_count += 1
    log.info(f"Deletion summary: {deleted_count} deleted, {failed_count} failed")
    if deleted_count > 0:
        # Report the actual total reclaimed (the original only logged a
        # placeholder here because it never tracked the sizes).
        log.info(f"Space reclaimed: {reclaimed_gb:.1f}GB")
    return deleted_count, failed_count
if __name__ == "__main__":
# Load environment variables
env_file = "/data/adaptai/platform/dataops/dto/.env"
if os.path.exists(env_file):
with open(env_file) as f:
for line in f:
if line.strip() and not line.startswith('#'):
key, value = line.strip().split('=', 1)
os.environ[key] = value
logger.info("๐Ÿ” VERIFYING UPLOADS AND PREPARING FOR SAFE DELETION")
successfully_uploaded, not_uploaded = verify_uploads()
if successfully_uploaded:
logger.info("\n๐Ÿ“‹ Files ready for safe deletion:")
for file in successfully_uploaded[:5]: # Show first 5 as sample
logger.info(f" - {file}")
if len(successfully_uploaded) > 5:
logger.info(f" - ... and {len(successfully_uploaded) - 5} more")
# Uncomment the next line to actually delete files after verification
safe_delete_files(successfully_uploaded)
# logger.warning("โš ๏ธ Safe deletion is commented out for safety. Uncomment to enable.")
if not_uploaded:
logger.warning("\nโš ๏ธ Files not uploaded (will not be deleted):")
for file in not_uploaded[:5]:
logger.warning(f" - {file}")
if len(not_uploaded) > 5:
logger.warning(f" - ... and {len(not_uploaded) - 5} more")