# NOTE(review): the lines below were scraped Hugging Face page metadata
# ("Spaces: Runtime error", "File size: 429 Bytes") — not part of the script.
import torch
from transformers import AutoTokenizer, Mistral3ForConditionalGeneration, FineGrainedFP8Config
# Load the Ministral-3 instruct checkpoint for inference.
model_id = "mistralai/Ministral-3-14B-Instruct-2512"

tokenizer = AutoTokenizer.from_pretrained(model_id)

# dequantize=True asks transformers to load the FP8-quantized weights and
# dequantize them to the requested compute dtype (bfloat16 here), trading
# memory savings for plain-dtype execution.
model = Mistral3ForConditionalGeneration.from_pretrained(
    model_id,
    device_map="auto",  # shard layers across available devices automatically
    quantization_config=FineGrainedFP8Config(dequantize=True),
    torch_dtype=torch.bfloat16,
)
print("Model loaded")