|
|
from transformers import AutoTokenizer, T5ForConditionalGeneration |
|
|
from huggingface_hub import HfApi, HfFolder |
|
|
import os |
|
|
|
|
|
|
|
|
# --- Configuration & authentication -----------------------------------------
# Hub token is expected in the "huggingface" environment variable; a missing
# variable raises KeyError immediately, which is the desired fail-fast here.
hf_token = os.environ["huggingface"]

# Local directory holding the fine-tuned checkpoint, and the Hub repo it
# should be published to.
local_model_dir = "./flan-t5-autobatch"
repo_id = "ajkndfjsdfasdf/flan-5-small-bigdataset"

# Hub client plus persisted token so later CLI/library calls are authenticated.
# NOTE(review): HfFolder.save_token is the legacy token-store API — confirm the
# installed huggingface_hub version still supports it.
api = HfApi()
HfFolder.save_token(hf_token)
|
|
|
|
|
|
|
|
# Ensure the target repo exists on the Hub: probe it first, create on failure.
try:
    # Raises (e.g. RepositoryNotFoundError) when the repo does not exist
    # or the token cannot see it.
    api.repo_info(repo_id, token=hf_token)
    print(f"📦 Репозиторий {repo_id} уже существует.")
except Exception as err:
    # Narrowed from a bare `except:` — a bare clause also swallowed
    # KeyboardInterrupt/SystemExit and hid auth or network errors.
    # Surface the reason before falling back to creation.
    print(f"📦 Репозиторий {repo_id} не найден. Создаём...")
    print(f"   (repo_info failed: {err})")
    # exist_ok=True keeps this idempotent if the repo appeared in the meantime.
    api.create_repo(repo_id=repo_id, token=hf_token, repo_type="model", exist_ok=True)
|
|
|
|
|
|
|
|
# --- Load the local checkpoint and publish it -------------------------------
model = T5ForConditionalGeneration.from_pretrained(local_model_dir)
tokenizer = AutoTokenizer.from_pretrained(local_model_dir)

# Model and tokenizer are pushed the same way; drive both through one loop.
uploads = (
    (model, "🚀 Push latest model to root"),
    (tokenizer, "🚀 Push latest tokenizer to root"),
)
for artifact, message in uploads:
    artifact.push_to_hub(repo_id, token=hf_token, commit_message=message)

print(f"✅ Модель загружена в: https://huggingface.co/{repo_id}")
|
|
|