File size: 722 Bytes
5ae3e12
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

# Paths: pretrained base model, the LoRA adapter checkpoint to fold in,
# and the directory the merged model + tokenizer are written to.
BASE_MODEL_PATH = "hf_release/MINDI-1.0-420M"
LORA_PATH = "output/checkpoints/checkpoint-12000"
OUTPUT_DIR = "final_model"


def main() -> None:
    """Merge a LoRA adapter into its base causal-LM and save the result.

    Loads the base model from ``BASE_MODEL_PATH``, applies the adapter
    weights from ``LORA_PATH``, merges them into the base weights, then
    writes the merged model and the base tokenizer to ``OUTPUT_DIR``.
    """
    print("Loading base model...")
    # trust_remote_code: the checkpoint ships custom modeling code that
    # must be executed to instantiate the architecture.
    model = AutoModelForCausalLM.from_pretrained(
        BASE_MODEL_PATH,
        trust_remote_code=True,
    )

    print("Loading LoRA...")
    model = PeftModel.from_pretrained(model, LORA_PATH)

    print("Merging...")
    # Folds the adapter deltas into the base weights and strips the PEFT
    # wrapper, leaving a plain transformers model.
    model = model.merge_and_unload()

    print("Saving final model...")
    # NOTE(review): safe_serialization=False emits pickle-based .bin shards
    # instead of safetensors — presumably a downstream consumer needs .bin;
    # confirm, otherwise prefer safetensors (the transformers default).
    model.save_pretrained(OUTPUT_DIR, safe_serialization=False)

    print("Saving tokenizer...")
    # Tokenizer is taken from the base model; LoRA does not alter it.
    tokenizer = AutoTokenizer.from_pretrained(
        BASE_MODEL_PATH,
        trust_remote_code=True,
    )
    tokenizer.save_pretrained(OUTPUT_DIR)

    print("✅ DONE")


# Guard so importing this module does not trigger the (heavy, side-effecting)
# load/merge/save pipeline.
if __name__ == "__main__":
    main()