# Eurus-PPO / upload.py
# Author: Takeru — folder uploaded using huggingface_hub (commit 897d1c2, verified; 319 bytes)
from transformers import AutoModelForCausalLM, AutoTokenizer

# Local checkpoint produced by OpenRLHF PPO training of a 7B LLaMA model.
checkpoint = "/mnt/algorithm/user_dir/fangyue/OpenRLHF/examples/scripts/ckpt/7b_llama"

# LLaMA is a decoder-only (causal) language model. The original script used
# AutoModelForMaskedLM, which would bolt a freshly-initialized, untrained
# masked-LM head onto the checkpoint and upload a broken model.
model = AutoModelForCausalLM.from_pretrained(checkpoint)
tokenizer = AutoTokenizer.from_pretrained(checkpoint)

# Push both the model weights and the tokenizer to the Hub repo "E-PPO".
model.push_to_hub("E-PPO")
tokenizer.push_to_hub("E-PPO")