"""Mirror teknium/OpenHermes-2.5-Mistral-7B into the CaptMetal/BuddAi repo.

Intended to run inside a Hugging Face Space: authenticates with the
``HF_TOKEN`` secret, downloads the source model, re-saves it locally as
safetensors, and uploads the folder to the target model repo.
"""

import os

from huggingface_hub import HfApi, login
from transformers import AutoModelForCausalLM, AutoTokenizer


def main() -> None:
    """Download the source model, re-serialize it, and upload to the hub.

    Raises:
        RuntimeError: if the HF_TOKEN environment variable is not set.
    """
    # 1. Login (add HF_TOKEN in Space Settings -> Secrets).
    # Check explicitly so a missing secret fails with a clear message
    # instead of an opaque KeyError.
    token = os.environ.get("HF_TOKEN")
    if not token:
        raise RuntimeError(
            "HF_TOKEN is not set; add it in Space Settings -> Secrets"
        )
    login(token=token)

    # 2. Load and save model files.
    # NOTE(review): this materializes the full 7B model in RAM just to
    # re-save it — confirm the Space has enough memory for that.
    model_id = "teknium/OpenHermes-2.5-Mistral-7B"
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(model_id)

    # Save files temporarily; safe_serialization=True writes .safetensors
    # shards rather than pickle-based .bin files.
    out_dir = "BuddAi"
    os.makedirs(out_dir, exist_ok=True)
    model.save_pretrained(out_dir, safe_serialization=True)
    tokenizer.save_pretrained(out_dir)

    # 3. Upload the saved folder to the target model repo.
    api = HfApi()
    api.upload_folder(
        folder_path=out_dir,
        repo_id="CaptMetal/BuddAi",
        repo_type="model",
    )
    print("Upload complete! Visit https://huggingface.co/CaptMetal/BuddAi")


# Guard the entry point so importing this module never triggers the
# multi-gigabyte download/upload as a side effect.
if __name__ == "__main__":
    main()