"""Merge a LoRA adapter into its base causal-LM and save the standalone result.

Loads the base model, applies the PEFT LoRA checkpoint, folds the adapter
weights into the base weights (``merge_and_unload``), then writes the merged
model plus the base tokenizer to an output directory.
"""

from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

# Defaults preserved from the original script.
BASE_MODEL_PATH = "hf_release/MINDI-1.0-420M"
LORA_PATH = "output/checkpoints/checkpoint-12000"
OUTPUT_DIR = "final_model"


def merge_lora(
    base_model_path: str = BASE_MODEL_PATH,
    lora_path: str = LORA_PATH,
    output_dir: str = OUTPUT_DIR,
    *,
    safe_serialization: bool = False,
) -> None:
    """Merge the LoRA adapter at *lora_path* into *base_model_path*.

    Args:
        base_model_path: HF directory/repo of the base causal LM.
        lora_path: PEFT checkpoint directory containing the LoRA adapter.
        output_dir: Where the merged model and tokenizer are written.
        safe_serialization: Passed to ``save_pretrained``. Default ``False``
            matches the original script, but NOTE: this writes pickle-based
            ``.bin`` weights, which are unsafe to load from untrusted sources —
            consider ``True`` (safetensors) unless a consumer requires ``.bin``.
    """
    print("Loading base model...")
    # trust_remote_code=True executes model code shipped with the checkpoint;
    # acceptable here because the base model is a local, first-party release.
    model = AutoModelForCausalLM.from_pretrained(
        base_model_path, trust_remote_code=True
    )

    print("Loading LoRA...")
    model = PeftModel.from_pretrained(model, lora_path)

    print("Merging...")
    # Folds the adapter deltas into the base weights and returns a plain
    # transformers model (no PEFT wrapper), so it saves/loads without peft.
    model = model.merge_and_unload()

    print("Saving final model...")
    model.save_pretrained(output_dir, safe_serialization=safe_serialization)

    print("Saving tokenizer...")
    # The adapter does not change the tokenizer; save the base one alongside
    # the merged weights so the output directory is self-contained.
    tokenizer = AutoTokenizer.from_pretrained(
        base_model_path, trust_remote_code=True
    )
    tokenizer.save_pretrained(output_dir)

    print("✅ DONE")


if __name__ == "__main__":
    merge_lora()