File size: 7,051 Bytes
149d2bb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
#!/usr/bin/env python3
"""
Verify basic embeddings (CLIP-BioMed, MedSigLIP, ViT models) against coreset IDs.

Usage:
    cd /Users/cajas.sebastian/Desktop/repositories/qml-mimic-cxr-embeddings
    python3 tests/verify_basic_embeddings.py

This script verifies:
- clip-biomed-embeddings
- medsiglip-448-embeddings
- vit-base-patch32-224-embeddings
- vit-base-patch16-224-embeddings
"""
import pickle
import sys
import os

class NumpyCompatUnpickler(pickle.Unpickler):
    """Unpickler that bridges the numpy 2.x module rename.

    Pickles written under numpy 2.x reference ``numpy._core``; older numpy
    installs expose the same objects under ``numpy.core``.  Rewriting the
    module path at lookup time lets such pickles load anyway.
    """

    # Old (numpy 2.x) prefix and the legacy prefix it maps back to.
    _RENAMED_PREFIX = 'numpy._core'
    _LEGACY_PREFIX = 'numpy.core'

    def find_class(self, module, name):
        """Resolve *name* in *module*, translating renamed numpy paths."""
        if module.startswith(self._RENAMED_PREFIX):
            module = module.replace(self._RENAMED_PREFIX, self._LEGACY_PREFIX)
        return super().find_class(module, name)

def load_pickle_safe(filepath):
    """Load a pickle file, trying several strategies in order.

    Strategies: (1) the custom numpy-compat unpickler, (2) pandas'
    ``read_pickle``, (3) plain ``pickle.load``.  The first one that
    succeeds wins.

    Args:
        filepath: Path to the pickle file.

    Returns:
        The unpickled object, or None if every strategy failed.
    """
    print(f"Loading pickle: {os.path.basename(filepath)}")

    def _via_custom_unpickler(path):
        with open(path, 'rb') as fh:
            return NumpyCompatUnpickler(fh).load()

    def _via_pandas(path):
        import pandas as pd
        return pd.read_pickle(path)

    def _via_standard_pickle(path):
        with open(path, 'rb') as fh:
            return pickle.load(fh)

    # (loader, label used in the success message) — tried in this order.
    loaders = [
        (_via_custom_unpickler, 'custom unpickler'),
        (_via_pandas, 'pandas'),
        (_via_standard_pickle, 'standard pickle'),
    ]

    for method_no, (loader, label) in enumerate(loaders, start=1):
        try:
            result = loader(filepath)
        except Exception as err:
            print(f"  ✗ Method {method_no} failed: {err}")
            continue
        print(f"  ✓ Loaded successfully with {label}")
        return result

    print(f"  ✗ All methods failed to load pickle file")
    return None

def verify_strategy(strategy_num, strategy_name, embedding_files, coreset_file):
    """Verify IDs for a single strategy across multiple embedding types.

    Args:
        strategy_num: Numeric strategy identifier (display only).
        strategy_name: Human-readable strategy name (display only).
        embedding_files: Mapping of embedding name -> pickle file path.
        coreset_file: Path to a text file holding one coreset ID per line.

    Returns:
        True when every embedding pickle contains all coreset IDs,
        False otherwise (missing files, unloadable pickles, or missing IDs).
    """
    print(f"\n{'='*80}")
    print(f"STRATEGY {strategy_num}: {strategy_name}")
    print(f"{'='*80}")

    # Check if coreset file exists
    if not os.path.exists(coreset_file):
        print(f"✗ Coreset file not found: {coreset_file}")
        return False

    # Load coreset IDs from text file (one ID per line, blanks skipped).
    print(f"\nReading coreset IDs from: {os.path.basename(coreset_file)}")
    with open(coreset_file, 'r') as f:
        coreset_ids = set(line.strip() for line in f if line.strip())
    print(f"  Coreset IDs: {len(coreset_ids):,}")

    all_passed = True

    # Check each embedding type
    for emb_name, pkl_path in embedding_files.items():
        print(f"\n  [{emb_name}]")

        if not os.path.exists(pkl_path):
            print(f"    ✗ Pickle file not found: {pkl_path}")
            all_passed = False
            continue

        # Load pickle file
        df = load_pickle_safe(pkl_path)
        if df is None:
            all_passed = False
            continue

        # Extract filenames from pickle; assumes a DataFrame-like object
        # with a 'filename' column — TODO confirm against the producers.
        try:
            pickle_ids = set(df['filename'].tolist())
            print(f"    Pickle IDs: {len(pickle_ids):,}")
        except Exception as e:
            print(f"    ✗ Error extracting filenames: {e}")
            all_passed = False
            continue

        # Compare
        matching_ids = coreset_ids & pickle_ids
        missing_ids = coreset_ids - pickle_ids

        # Decide pass/fail on exact set counts, not on float equality of a
        # percentage (fragile, and inconsistent for an empty coreset); the
        # match rate is kept for display only.
        complete = len(matching_ids) == len(coreset_ids)
        match_rate = (len(matching_ids) / len(coreset_ids) * 100) if coreset_ids else 0
        status = '✓ PASS' if complete else '✗ FAIL'

        print(f"    {status} Match rate: {match_rate:.2f}% ({len(matching_ids):,}/{len(coreset_ids):,})")

        if missing_ids:
            print(f"    ✗ Missing IDs: {len(missing_ids):,}")
            all_passed = False

    return all_passed

def main():
    """Verify all configured coreset strategies; return a process exit code.

    Returns:
        0 when every strategy passes verification, 1 otherwise.
    """
    print("=" * 80)
    print("BASIC EMBEDDINGS VERIFICATION")
    print("=" * 80)

    # All paths below are relative to the repository root.  The original
    # hard-coded chdir crashed with FileNotFoundError on any machine other
    # than the author's; guard it and fall back to the current directory
    # (per the usage note in the module docstring, the script is expected
    # to be launched from the repo root anyway).
    repo_root = '/Users/cajas.sebastian/Desktop/repositories/qml-mimic-cxr-embeddings'
    if os.path.isdir(repo_root):
        os.chdir(repo_root)
    # Report the directory actually in effect for the relative paths.
    print(f"Working directory: {os.getcwd()}")

    # Define strategies to verify
    strategies = [
        {
            'num': 5,
            'name': 'PathologyStratifiedClean',
            'coreset': 'coreset-ids/seed_0/coreset-has_pathology-5-PathologyStratifiedClean-seed_0.txt',
            'embeddings': {
                'clip-biomed': 'clip-biomed-embeddings/data_type5_insurance.pkl',
                'medsiglip-448': 'medsiglip-448-embeddings/data_type5_medsiglip.pkl',
                'vit-patch32': 'vit-base-patch32-224-embeddings/data_type5_n1999_seed42_vit_base_patch32_224.pkl',
                'vit-patch16': 'vit-base-patch16-224-embeddings/data_type5_n1999_seed42_vit_base_patch16_224.pkl'
            }
        },
        {
            'num': 9,
            'name': 'GradMatch',
            'coreset': 'coreset-ids/seed_0/coreset-has_pathology-9-GradMatch-seed_0.txt',
            'embeddings': {
                'clip-biomed': 'clip-biomed-embeddings/data_type9_insurance_2371rows.pkl',
                'medsiglip-448': 'medsiglip-448-embeddings/data_type9_medsiglip.pkl',
                'vit-patch32': 'vit-base-patch32-224-embeddings/data_type9_n2371_seed42_vit_base_patch32_224.pkl',
                'vit-patch16': 'vit-base-patch16-224-embeddings/data_type9_n2371_seed42_vit_base_patch16_224.pkl'
            }
        },
        {
            'num': 11,
            'name': 'Uncertainty',
            'coreset': 'coreset-ids/seed_0/coreset-has_pathology-11-Uncertainty-seed_0.txt',
            'embeddings': {
                'clip-biomed': 'clip-biomed-embeddings/data_type11_insurance_2371rows.pkl',
                'medsiglip-448': 'medsiglip-448-embeddings/data_type11_medsiglip.pkl',
                'vit-patch32': 'vit-base-patch32-224-embeddings/data_type11_n2371_seed42_vit_base_patch32_224.pkl',
                'vit-patch16': 'vit-base-patch16-224-embeddings/data_type11_n2371_seed42_vit_base_patch16_224.pkl'
            }
        }
    ]

    # Run every strategy, keyed by its number for the summary below.
    results = {}
    for strategy in strategies:
        results[strategy['num']] = verify_strategy(
            strategy['num'],
            strategy['name'],
            strategy['embeddings'],
            strategy['coreset']
        )

    # Summary
    print(f"\n{'='*80}")
    print("VERIFICATION SUMMARY")
    print(f"{'='*80}")

    for strategy in strategies:
        num = strategy['num']
        name = strategy['name']
        passed = results[num]
        status = '✓ PASS' if passed else '✗ FAIL'
        print(f"  Strategy {num:2d} ({name:25s}): {status}")

    all_passed = all(results.values())
    print(f"\n{'='*80}")
    if all_passed:
        print("✓✓✓ ALL VERIFICATIONS PASSED! ✓✓✓")
        print("All coreset IDs match the embeddings in pickle files!")
    else:
        print("✗✗✗ SOME VERIFICATIONS FAILED ✗✗✗")
        print("Some IDs don't match. Check the details above.")
    print(f"{'='*80}")

    return 0 if all_passed else 1

if __name__ == "__main__":
    sys.exit(main())