---
library_name: transformers
tags: []
---
# Just for PEFT testing

See PR #1658

## Reproduction
```python
# Reproduction script: publish two versions of a tiny LoRA adapter to the Hub.
#
# NOTE: the original snippet imported `cached_download` from `huggingface_hub`
# and `PeftModel` from `peft`; neither was used, and `cached_download` has been
# removed from huggingface_hub, which made the snippet fail at import time.
# Both dead imports are dropped here.
import torch
from transformers import AutoModelForCausalLM

from peft import LoraConfig, get_peft_model

# First upload: seed 0 so the randomly-initialized LoRA weights
# (init_lora_weights=False skips the identity init) are reproducible; push
# the adapter to the Hub under the default revision.
torch.manual_seed(0)
model = AutoModelForCausalLM.from_pretrained("hf-internal-testing/tiny-random-BertModel").eval()
lora_config = LoraConfig(init_lora_weights=False)
peft_model = get_peft_model(model, lora_config)
peft_model.push_to_hub("peft-internal-testing/tiny-random-BertModel-lora")
del model, peft_model

# Second upload: same base model (seed 0), but different LoRA weights
# (seed 1); push to the same repo under a named revision so the two adapter
# versions can be compared.
torch.manual_seed(0)
model = AutoModelForCausalLM.from_pretrained("hf-internal-testing/tiny-random-BertModel").eval()
torch.manual_seed(1)
lora_config = LoraConfig(init_lora_weights=False)
peft_model = get_peft_model(model, lora_config)
peft_model.push_to_hub("peft-internal-testing/tiny-random-BertModel-lora", revision="v1.2.3")
```