File size: 4,977 Bytes
fd357f4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
#!/usr/bin/env python3
"""
Upload verification and safe deletion script
Verifies successful uploads before deleting local copies
"""

import os
import logging
from huggingface_hub import HfApi

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

def verify_uploads():
    """Compare local model artifacts against files present in the HF Hub repo.

    Walks ``/data/experiments`` for model weight files (``.safetensors``,
    ``.pt``, ``.bin``) no larger than 10 GB, then checks each one against
    the repository's file listing.

    Returns:
        tuple[list[str], list[str]]: ``(successfully_uploaded, not_uploaded)``
            as absolute local paths. Both lists are empty when the repository
            listing cannot be fetched, so callers never delete unverified files.

    Raises:
        ValueError: if the ``HF_TOKEN`` environment variable is not set.
    """
    token = os.getenv('HF_TOKEN')
    if not token:
        raise ValueError("HF_TOKEN environment variable not set")

    api = HfApi(token=token)
    repo_id = "LevelUp2x/dto-models"

    try:
        repo_info = api.repo_info(repo_id)
        # Build a set so the per-file membership tests below are O(1)
        # instead of scanning a list for every local file.
        uploaded_files = {file.rfilename for file in repo_info.siblings}
        logger.info(f"Found {len(uploaded_files)} files in {repo_id}")

        # Collect local model files that should have been uploaded.
        local_files = []
        experiments_path = "/data/experiments"
        max_size = 10 * 1024**3  # files above 10 GB are never uploaded

        if os.path.exists(experiments_path):
            for root, _, files in os.walk(experiments_path):
                for file in files:
                    if not file.endswith(('.safetensors', '.pt', '.bin')):
                        continue
                    file_path = os.path.join(root, file)
                    try:
                        # Keep the try body minimal: only the stat can raise.
                        if os.path.getsize(file_path) > max_size:
                            continue
                    except OSError:
                        # File vanished or is unreadable; skip it.
                        continue
                    local_files.append(file_path)

        logger.info(f"Found {len(local_files)} local files eligible for upload")

        # Partition local files by whether they exist in the repository.
        successfully_uploaded = []
        not_uploaded = []

        for local_file in local_files:
            # Repo paths mirror the tree rooted at /data/experiments/.
            repo_path = local_file.replace('/data/experiments/', '')

            if repo_path in uploaded_files:
                successfully_uploaded.append(local_file)
            else:
                not_uploaded.append(local_file)

        logger.info(f"✅ {len(successfully_uploaded)} files successfully uploaded")
        logger.info(f"❌ {len(not_uploaded)} files not found in repository")

        return successfully_uploaded, not_uploaded

    except Exception as e:
        # Deliberate fail-safe: any verification failure reports "nothing
        # verified" so downstream deletion has nothing to remove.
        logger.error(f"Failed to verify uploads: {e}")
        return [], []

def safe_delete_files(files_to_delete):
    """Delete local files whose upload has already been verified.

    Logs each deletion with the file's size, then a summary including the
    actual total disk space reclaimed.

    Args:
        files_to_delete: iterable of absolute paths confirmed to exist in
            the remote repository. May be empty.
    """
    if not files_to_delete:
        logger.info("No files to delete")
        return

    logger.warning(f"⚠️  About to delete {len(files_to_delete)} files")

    deleted_count = 0
    failed_count = 0
    reclaimed_bytes = 0  # track real space freed instead of estimating

    for file_path in files_to_delete:
        try:
            file_size = os.path.getsize(file_path)  # stat before unlink
            os.remove(file_path)
            logger.info(f"🗑️  Deleted {file_path} ({file_size / 1024**3:.1f}GB)")
            deleted_count += 1
            reclaimed_bytes += file_size
        except Exception as e:
            # Best-effort: a single failed deletion must not abort the batch.
            logger.error(f"Failed to delete {file_path}: {e}")
            failed_count += 1

    logger.info(f"Deletion summary: {deleted_count} deleted, {failed_count} failed")

    # Report the exact space reclaimed, accumulated per file above.
    if deleted_count > 0:
        logger.info(f"Reclaimed {reclaimed_bytes / 1024**3:.2f}GB of disk space")

if __name__ == "__main__":
    # Load environment variables
    env_file = "/data/adaptai/platform/dataops/dto/.env"
    if os.path.exists(env_file):
        with open(env_file) as f:
            for line in f:
                if line.strip() and not line.startswith('#'):
                    key, value = line.strip().split('=', 1)
                    os.environ[key] = value
    
    logger.info("🔍 VERIFYING UPLOADS AND PREPARING FOR SAFE DELETION")
    
    successfully_uploaded, not_uploaded = verify_uploads()
    
    if successfully_uploaded:
        logger.info("\n📋 Files ready for safe deletion:")
        for file in successfully_uploaded[:5]:  # Show first 5 as sample
            logger.info(f"  - {file}")
        if len(successfully_uploaded) > 5:
            logger.info(f"  - ... and {len(successfully_uploaded) - 5} more")
        
        # Uncomment the next line to actually delete files after verification
        safe_delete_files(successfully_uploaded)
        # logger.warning("⚠️  Safe deletion is commented out for safety. Uncomment to enable.")
    
    if not_uploaded:
        logger.warning("\n⚠️  Files not uploaded (will not be deleted):")
        for file in not_uploaded[:5]:
            logger.warning(f"  - {file}")
        if len(not_uploaded) > 5:
            logger.warning(f"  - ... and {len(not_uploaded) - 5} more")