# File size: 1,596 Bytes (commit 989ce21) — metadata left over from web export
import torch
import sys
from transformers import AutoModel, AutoConfig

# Repository ID on the Hugging Face Hub.
REPO_ID = "StarNetLaboratory/mosaic"
# The repo ships custom modeling code (foundation_bert.py), so
# trust_remote_code must be True for transformers to download and run it.
TRUST_CODE = True


def _load_config():
    """Load the model configuration from the Hub.

    Returns the AutoConfig instance on success; prints a diagnostic and
    exits with status 1 on any failure.
    """
    print("--- 1. Attempting to load configuration ---")
    try:
        # Attempt to load config to verify config.json is present and readable.
        config = AutoConfig.from_pretrained(REPO_ID, trust_remote_code=TRUST_CODE)
        print(f"✅ Config loaded successfully: {config.architectures}")
        return config
    except Exception as e:
        # Top-level boundary: report and abort so the model step never runs
        # against a broken/missing config.
        print(f"❌ Config loading failed: {e}")
        sys.exit(1)


def _load_model(config):
    """Download, instantiate, and sanity-check the model.

    Returns the model in eval mode on success; prints diagnostics and
    exits with status 1 on any failure.
    """
    print("\n--- 2. Attempting to load model ---")
    try:
        # Detect device (GPU if available, else CPU).
        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        # This call triggers transformers to download and execute
        # foundation_bert.py and load weights from model.safetensors.
        model = AutoModel.from_pretrained(
            REPO_ID,
            config=config,
            trust_remote_code=TRUST_CODE,
            torch_dtype=torch.float32,  # match the dtype used during training/local testing
        ).to(device)
        model.eval()
        # Parameter count is a cheap sanity check that the expected
        # architecture was actually instantiated.
        total_params = sum(p.numel() for p in model.parameters())
        print(f"✅ Model loaded successfully! Total parameters: {total_params:,}")
        return model
    except Exception as e:
        print("❌ Model loading failed.")
        print("Check file integrity and remote code logic (foundation_bert.py).")
        print(f"Error details: {e}")
        sys.exit(1)


def main():
    """Run the two-step smoke test: config load, then model load."""
    config = _load_config()
    _load_model(config)


if __name__ == "__main__":
    main()