File size: 513 Bytes
c96ac34 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 |
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel


def merge_lora(
    base_dir: str = "hf_pretrained",
    lora_dir: str = "hf_sft_lora",
    out_dir: str = "hf_sft_merged",
) -> None:
    """Merge a LoRA adapter into its base causal LM and save the result.

    Loads the base model and tokenizer from ``base_dir``, applies the LoRA
    adapter from ``lora_dir``, folds the adapter deltas into the base weights,
    and writes the merged standalone model (safetensors) plus tokenizer to
    ``out_dir``.

    Args:
        base_dir: Directory with the pretrained base model and tokenizer.
        lora_dir: Directory with the trained PEFT/LoRA adapter.
        out_dir: Destination directory for the merged model and tokenizer.
    """
    tok = AutoTokenizer.from_pretrained(base_dir, use_fast=True)
    # Load on CPU so the merge does not require a GPU.
    # NOTE(review): merge happens in fp16 — assumes this matches the training
    # dtype; merging in fp32 would be slightly more precise. Confirm.
    model = AutoModelForCausalLM.from_pretrained(
        base_dir, torch_dtype=torch.float16, device_map="cpu"
    )
    model = PeftModel.from_pretrained(model, lora_dir)
    # merge_and_unload() folds LoRA deltas into the base weights and returns
    # a plain transformers model with no PEFT wrappers.
    model = model.merge_and_unload()
    model.save_pretrained(out_dir, safe_serialization=True)
    tok.save_pretrained(out_dir)


if __name__ == "__main__":
    merge_lora()
|