|
|
from transformers import AutoModelForCausalLM, AutoTokenizer
|
|
|
from huggingface_hub import login
|
|
|
import os
|
|
|
|
|
|
|
|
|
# Download the tiny-gpt2 model and tokenizer from the Hugging Face Hub and
# persist them into a local directory that can be re-loaded directly with
# from_pretrained(save_directory).

# Read the auth token from the environment rather than hard-coding it —
# secrets must never be committed to source control.
hf_token = os.environ.get("HF_TOKEN")

# sshleifer/tiny-gpt2 is a public model, so authentication is optional;
# only log in when a token was actually provided.
if hf_token:
    login(token=hf_token)

model_name = "sshleifer/tiny-gpt2"

save_directory = "./tiny-gpt2-model"

os.makedirs(save_directory, exist_ok=True)

# Download, then save in the standard save_pretrained layout.
# (Passing cache_dir= to from_pretrained would only populate the HF cache
# structure under save_directory, not a clean, directly loadable model
# directory — which is what the success message below promises.)
model = AutoModelForCausalLM.from_pretrained(model_name)

tokenizer = AutoTokenizer.from_pretrained(model_name)

model.save_pretrained(save_directory)
tokenizer.save_pretrained(save_directory)

print(f"Model and tokenizer downloaded successfully to {save_directory}")