# AI-AVECINNA / loader.py
# Author: bdtimuhammad — commit ff3a836 (verified)
import torch
import gc
import open_clip
class ModelLoader:
    """Lazy loader for the BiomedCLIP model with VRAM cleanup helpers.

    The model is loaded on first use and cached on the instance, so repeated
    calls to :meth:`load_biomed_clip` are cheap.
    """

    def __init__(self):
        # Cached model and preprocessing transform; populated lazily on the
        # first call to load_biomed_clip().
        self.biomed_model = None
        self.preprocess = None

    def load_biomed_clip(self):
        """Universal Zero-Shot Auditor (BiomedCLIP).

        Returns:
            tuple: ``(model, preprocess)`` — the BiomedCLIP model in eval
            mode and its image preprocessing transform.
        """
        if self.biomed_model is None:
            print("🔄 Loading BiomedCLIP Universal Auditor...")
            model, _, preprocess = open_clip.create_model_and_transforms(
                'hf-hub:microsoft/BiomedCLIP-PubMedBERT_256-vit_base_patch16_224'
            )
            # Fix: the original unconditionally called model.to("cuda"),
            # which crashes on CPU-only hosts. Mirror the availability
            # guard already used in clear_vram().
            device = "cuda" if torch.cuda.is_available() else "cpu"
            self.biomed_model = model.to(device).eval()
            self.preprocess = preprocess
        return self.biomed_model, self.preprocess

    def clear_vram(self):
        """Safety flush to ensure Council stability on T4."""
        gc.collect()
        # Only touch the CUDA allocator when a GPU is actually present.
        if torch.cuda.is_available():
            torch.cuda.empty_cache()
# Module-level singleton: importing this module exposes a shared `loader`
# instance so every caller reuses the same cached model state.
loader = ModelLoader()