agent-ai / core /tokenizer.py
Mauricio-100's picture
Create core/tokenizer.py
39f5261 verified
raw
history blame contribute delete
220 Bytes
from transformers import AutoTokenizer
def load_tokenizer(model_name: str):
    """Load and return the Hugging Face tokenizer for *model_name*.

    Thin convenience wrapper around ``AutoTokenizer.from_pretrained``;
    downloads/caches the tokenizer files on first use.
    """
    # Progress message (French): "Loading the tokenizer for <model>..."
    print(f"🔤 Chargement du tokenizer pour {model_name}...")
    return AutoTokenizer.from_pretrained(model_name)