"""Download a Hugging Face causal-LM and tokenizer and save a loadable copy locally.

The Hugging Face token is read from the HF_TOKEN environment variable instead of
being hardcoded — never commit secrets to source code.
"""
import os

from huggingface_hub import login
from transformers import AutoModelForCausalLM, AutoTokenizer


def main() -> None:
    """Fetch the model/tokenizer and write them to ``save_directory``."""
    # SECURITY FIX: read the token from the environment rather than embedding
    # a secret in the source file. Public models need no token at all, so a
    # missing token is not fatal — we just skip login.
    hf_token = os.environ.get("HF_TOKEN")
    if hf_token:
        login(token=hf_token)
    else:
        print("HF_TOKEN not set; proceeding without authentication "
              "(fine for public models).")

    model_name = "sshleifer/tiny-gpt2"
    save_directory = "./tiny-gpt2-model"

    # Create the target directory if it doesn't already exist.
    os.makedirs(save_directory, exist_ok=True)

    # Download the model and tokenizer from the Hub.
    model = AutoModelForCausalLM.from_pretrained(model_name)
    tokenizer = AutoTokenizer.from_pretrained(model_name)

    # BUGFIX: the original passed cache_dir=save_directory, which only relocates
    # the HF cache (hashed blob layout) — it does NOT produce a directory you can
    # later load with from_pretrained(save_directory). save_pretrained() writes a
    # clean, directly loadable copy, matching the script's stated intent.
    model.save_pretrained(save_directory)
    tokenizer.save_pretrained(save_directory)

    print(f"Model and tokenizer downloaded successfully to {save_directory}")


if __name__ == "__main__":
    main()