| | |
| | """ |
| | Upload verification and safe deletion script |
| | Verifies successful uploads before deleting local copies |
| | """ |
| |
|
| | import os |
| | import logging |
| | from huggingface_hub import HfApi |
| |
|
| | |
# Configure root logging once at import time: timestamped INFO-level messages
# for everything this script reports (verification results, deletions).
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
| |
|
def verify_uploads():
    """Verify which local model files were successfully uploaded to HF Hub.

    Reads the HF token from the HF_TOKEN environment variable, lists the
    files present in the remote repository, and compares them against local
    model artifacts (.safetensors/.pt/.bin under /data/experiments).

    Returns:
        tuple[list[str], list[str]]: (successfully_uploaded, not_uploaded)
        local file paths. Both lists are empty when verification fails for
        any reason (the error is logged, nothing is raised), so callers
        never delete anything on an incomplete verification.

    Raises:
        ValueError: if HF_TOKEN is not set.
    """
    token = os.getenv('HF_TOKEN')
    if not token:
        raise ValueError("HF_TOKEN environment variable not set")

    api = HfApi(token=token)
    repo_id = "LevelUp2x/dto-models"

    try:
        repo_info = api.repo_info(repo_id)
        # Set, not list: membership is tested once per local file below,
        # and siblings can number in the thousands.
        uploaded_files = {file.rfilename for file in repo_info.siblings}
        logger.info(f"Found {len(uploaded_files)} files in {repo_id}")

        local_files = []
        experiments_path = "/data/experiments"

        if os.path.exists(experiments_path):
            for root, _, files in os.walk(experiments_path):
                for file in files:
                    if file.endswith(('.safetensors', '.pt', '.bin')):
                        file_path = os.path.join(root, file)
                        try:
                            file_size = os.path.getsize(file_path)
                            # Files over 10 GB were never eligible for
                            # upload, so they are not candidates for
                            # deletion either.
                            if file_size > 10 * 1024**3:
                                continue
                            local_files.append(file_path)
                        except OSError:
                            # File vanished or is unreadable; skip it.
                            continue

        logger.info(f"Found {len(local_files)} local files eligible for upload")

        successfully_uploaded = []
        not_uploaded = []

        for local_file in local_files:
            # Map the absolute local path to its repo-relative path.
            # relpath strips only the leading prefix, unlike str.replace
            # which would also mangle a matching substring mid-path.
            repo_path = os.path.relpath(local_file, experiments_path)

            if repo_path in uploaded_files:
                successfully_uploaded.append(local_file)
            else:
                not_uploaded.append(local_file)

        logger.info(f"✅ {len(successfully_uploaded)} files successfully uploaded")
        logger.info(f"❌ {len(not_uploaded)} files not found in repository")

        return successfully_uploaded, not_uploaded

    except Exception as e:
        # Best-effort boundary: report the failure and return empty lists
        # so the caller treats nothing as safe to delete.
        logger.error(f"Failed to verify uploads: {e}")
        return [], []
| |
|
def safe_delete_files(files_to_delete):
    """Delete the given local files, logging each deletion and a summary.

    Args:
        files_to_delete: iterable of absolute file paths that have already
            been verified as uploaded. May be empty.

    Side effects:
        Removes files from disk; logs per-file results plus a final
        deleted/failed summary. Never raises — individual failures are
        counted and logged so one bad file cannot stop the sweep.
    """
    if not files_to_delete:
        logger.info("No files to delete")
        return

    logger.warning(f"⚠️ About to delete {len(files_to_delete)} files")

    deleted_count = 0
    failed_count = 0

    for file_path in files_to_delete:
        try:
            # Capture the size before removal so it can be reported.
            file_size = os.path.getsize(file_path) / (1024**3)
            os.remove(file_path)
            logger.info(f"🗑️ Deleted {file_path} ({file_size:.1f}GB)")
            deleted_count += 1
        except Exception as e:
            logger.error(f"Failed to delete {file_path}: {e}")
            failed_count += 1

    logger.info(f"Deletion summary: {deleted_count} deleted, {failed_count} failed")

    if deleted_count > 0:
        logger.info("Space reclamation completed successfully")
| |
|
if __name__ == "__main__":
    # Load environment variables (notably HF_TOKEN) from the project .env
    # file if present. Comment lines and lines without '=' are skipped —
    # the original split('=', 1) unpacking crashed on malformed lines.
    env_file = "/data/adaptai/platform/dataops/dto/.env"
    if os.path.exists(env_file):
        with open(env_file) as f:
            for raw_line in f:
                line = raw_line.strip()
                if line and not line.startswith('#') and '=' in line:
                    key, value = line.split('=', 1)
                    os.environ[key] = value

    logger.info("🔍 VERIFYING UPLOADS AND PREPARING FOR SAFE DELETION")

    successfully_uploaded, not_uploaded = verify_uploads()

    if successfully_uploaded:
        # Show a short preview before deleting so the operator can sanity-
        # check what is about to be removed.
        logger.info("\n📋 Files ready for safe deletion:")
        for file in successfully_uploaded[:5]:
            logger.info(f"  - {file}")
        if len(successfully_uploaded) > 5:
            logger.info(f"  - ... and {len(successfully_uploaded) - 5} more")

        safe_delete_files(successfully_uploaded)

    # Files missing from the repo are reported but never touched on disk.
    if not_uploaded:
        logger.warning("\n⚠️ Files not uploaded (will not be deleted):")
        for file in not_uploaded[:5]:
            logger.warning(f"  - {file}")
        if len(not_uploaded) > 5:
            logger.warning(f"  - ... and {len(not_uploaded) - 5} more")