"""Merge a PEFT/LoRA adapter into its base model and save a standalone checkpoint."""

import torch  # noqa: F401 -- not referenced directly; torch is the backend for the HF models below
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel


def main() -> None:
    """Load the base model and its fine-tuned adapter, merge, and save to disk.

    Side effects: downloads both checkpoints from the Hugging Face Hub and
    writes the merged model (plus tokenizer files) into ``output_folder``.
    """
    # Base checkpoint the adapter was trained on.
    base_model_id = "RiverTest/autotrain-uiny8-3o6jx"  # Replace with your base model
    tokenizer = AutoTokenizer.from_pretrained(base_model_id)
    base_model = AutoModelForCausalLM.from_pretrained(base_model_id)

    # Attach the fine-tuned (LoRA) adapter weights on top of the base model.
    ft_model_id = "RiverTest/TrainerToMerge"  # Replace with your fine-tuned model
    ft_model = PeftModel.from_pretrained(base_model, ft_model_id)

    # Fold the adapter weights into the base weights and drop the PEFT wrappers,
    # yielding a plain transformers model.
    merged_model = ft_model.merge_and_unload()

    # Destination folder for the merged checkpoint.
    output_folder = "."  # Replace '.' with your desired folder name

    merged_model.save_pretrained(output_folder)
    # Also save the tokenizer so the output folder is a complete, directly
    # loadable checkpoint (the original script omitted this step).
    tokenizer.save_pretrained(output_folder)


if __name__ == "__main__":
    main()