File size: 278 Bytes
8775924
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
from transformers import AutoModelForCausalLM
import torch

def load_model(model_name: str):
    """Load a pretrained causal language model in bfloat16 precision.

    Args:
        model_name: Hugging Face Hub identifier or local path of the model.

    Returns:
        The loaded ``AutoModelForCausalLM`` instance.
    """
    print(f"🧠 Chargement du modèle {model_name}...")
    # bfloat16 halves the memory footprint versus float32 while keeping
    # the float32 exponent range.
    return AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.bfloat16)