"""Upload a fine-tuned LLaMA checkpoint (model + tokenizer) to the Hugging Face Hub."""

from transformers import AutoModelForCausalLM, AutoTokenizer


def main() -> None:
    """Load the local OpenRLHF checkpoint and push it to the "E-PPO" Hub repo.

    Requires prior authentication with the Hugging Face Hub
    (e.g. `huggingface-cli login`) for the push calls to succeed.
    """
    # Local path to the 7B LLaMA checkpoint produced by OpenRLHF PPO training.
    checkpoint = "/mnt/algorithm/user_dir/fangyue/OpenRLHF/examples/scripts/ckpt/7b_llama"

    # BUG FIX: LLaMA is a decoder-only (causal) language model; it has no
    # masked-LM head, so AutoModelForMaskedLM cannot load this checkpoint.
    # AutoModelForCausalLM is the correct auto class for LLaMA architectures.
    model = AutoModelForCausalLM.from_pretrained(checkpoint)
    tokenizer = AutoTokenizer.from_pretrained(checkpoint)

    # Push both artifacts to the same Hub repository so the model card,
    # weights, and tokenizer files stay together.
    model.push_to_hub("E-PPO")
    tokenizer.push_to_hub("E-PPO")


if __name__ == "__main__":
    main()