"""Download the base NLLB model and push a deploy-ready copy to the Hub.
Pushes both the weights/tokenizer and the Inference-Endpoints artifacts
(`handler.py`, `requirements.txt`, `README.md`) so a subsequent
"Deploy → Inference Endpoints" click on the Hub just works.
Usage:
hf auth login # or set HF_TOKEN
python model_loader.py
"""
from __future__ import annotations
from pathlib import Path
from huggingface_hub import HfApi
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
# Source checkpoint to download from the Hub.
BASE = "facebook/nllb-200-distilled-600M"
# Destination repo that receives the weights and endpoint artifacts.
REPO = "ericaRC/example"
# Files expected to live next to this script; missing ones are skipped.
# NOTE(review): TROUBLESHOOTING.md is uploaded too, though the module
# docstring only mentions the first three — presumably intentional.
ENDPOINT_FILES = ("handler.py", "requirements.txt", "README.md", "TROUBLESHOOTING.md")
def push_weights() -> None:
    """Fetch the base checkpoint and push model + tokenizer to the target repo."""
    model = AutoModelForSeq2SeqLM.from_pretrained(BASE)
    tok = AutoTokenizer.from_pretrained(BASE)
    # Tokenizer first, then weights — mirrors the original upload order.
    tok.push_to_hub(REPO)
    model.push_to_hub(REPO)
def push_endpoint_files() -> None:
    """Upload the Inference-Endpoints artifacts that sit beside this script.

    Files listed in ENDPOINT_FILES that are missing on disk are reported
    and skipped rather than treated as errors (deliberate best-effort).
    """
    api = HfApi()
    # Idempotent: no-op if the repo already exists.
    api.create_repo(REPO, exist_ok=True)
    here = Path(__file__).resolve().parent
    for filename in ENDPOINT_FILES:
        candidate = here / filename
        if candidate.exists():
            api.upload_file(
                path_or_fileobj=str(candidate),
                path_in_repo=filename,
                repo_id=REPO,
                commit_message=f"Update {filename}",
            )
            print(f"[ok] uploaded {filename}")
        else:
            print(f"[skip] {filename} not found next to model_loader.py")
if __name__ == "__main__":
    # Weights first: push_to_hub creates REPO if needed, so the later
    # endpoint-file uploads land in an existing repo either way.
    push_weights()
    push_endpoint_files()