# File size: 322 Bytes
"""Load a causal language model and its tokenizer with 8-bit quantization.

Exposes three module-level names for downstream code: ``model_name``,
``tokenizer``, and ``model``.
"""
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig

# NOTE(review): this model id looks anonymized ("1c1/7cpc") — confirm the real
# Hugging Face Hub repository id before running.
model_name = "1c1/7cpc"

tokenizer = AutoTokenizer.from_pretrained(model_name)

model = AutoModelForCausalLM.from_pretrained(
    model_name,
    device_map="auto",  # let accelerate place layers across available devices
    # The bare `load_in_8bit=True` kwarg is deprecated in transformers; the
    # supported path is a BitsAndBytesConfig passed via `quantization_config`.
    quantization_config=BitsAndBytesConfig(load_in_8bit=True),
    torch_dtype=torch.float16,  # dtype for the modules that stay unquantized
    low_cpu_mem_usage=True,
)