# Source: Hugging Face Spaces file viewer (backend/model_loader.py, 260 bytes).
# backend/model_loader.py
"""Load the nursing-domain BLOOM-1b7 model and its tokenizer once at import time.

Importing this module triggers a download (or cache hit) from the Hugging Face
Hub and exposes module-level ``tokenizer`` and ``model`` objects for reuse.
"""
from transformers import AutoTokenizer, AutoModelForCausalLM

# Hugging Face Hub repo id; resolved (and cached) by from_pretrained on first use.
MODEL_PATH = "b1228032/nursing-bloom-1b7-full"

tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
model = AutoModelForCausalLM.from_pretrained(MODEL_PATH)

# Inference only: switch off dropout / other train-mode behavior.
# (Fixes the stray "|" scraping artifact on the original line, which was a syntax error.)
model.eval()