# Sentinel / model_loader.py
# Created by prelington (commit 9ae4384, verified)
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
def load_model(model_name="your-username/sentinel"):
    """Load the Sentinel model and tokenizer as a text-generation pipeline.

    Args:
        model_name: Hugging Face Hub repo id (or local path) of the model.
            Defaults to the placeholder "your-username/sentinel".

    Returns:
        A ``transformers`` text-generation pipeline wrapping the loaded
        model and tokenizer; call it with a prompt string to generate text.
    """
    print(f"Loading {model_name}...")
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        device_map="auto",  # places weights on GPU(s) automatically when available
        # SECURITY: trust_remote_code=True executes arbitrary Python shipped
        # with the model repo. Only load model_name values you trust.
        trust_remote_code=True,
    )
    return pipeline("text-generation", model=model, tokenizer=tokenizer)