# Upload a local OpenRLHF PPO checkpoint (model + tokenizer) to the Hugging Face Hub.
from transformers import AutoModelForCausalLM, AutoTokenizer

# Local path to the PPO-trained LLaMA-7B checkpoint produced by OpenRLHF.
checkpoint = "/mnt/algorithm/user_dir/fangyue/OpenRLHF/examples/scripts/ckpt/7b_llama"

# LLaMA is a decoder-only (causal) language model. The original code used
# AutoModelForMaskedLM, but LlamaConfig has no masked-LM mapping in
# transformers, so from_pretrained would raise at load time.
model = AutoModelForCausalLM.from_pretrained(checkpoint)
tokenizer = AutoTokenizer.from_pretrained(checkpoint)

# Upload both artifacts to the Hub repository "E-PPO" (requires a prior
# `huggingface-cli login` or HF_TOKEN in the environment).
model.push_to_hub("E-PPO")
tokenizer.push_to_hub("E-PPO")