"""Download the base NLLB model and push a deploy-ready copy to the Hub.

Pushes both the weights/tokenizer and the Inference-Endpoints artifacts
(`handler.py`, `requirements.txt`, `README.md`, `TROUBLESHOOTING.md`) so a
subsequent "Deploy → Inference Endpoints" click on the Hub just works.

Usage:
    hf auth login        # or set HF_TOKEN
    python model_loader.py
"""
|
|
| from __future__ import annotations |
|
|
| from pathlib import Path |
|
|
| from huggingface_hub import HfApi |
| from transformers import AutoModelForSeq2SeqLM, AutoTokenizer |
|
|
# Upstream checkpoint that gets mirrored.
BASE = "facebook/nllb-200-distilled-600M"
# Destination repo on the Hub; both weights and endpoint files land here.
REPO = "ericaRC/example"

# Inference-Endpoints artifacts expected to sit next to this script.
# Missing ones are skipped with a notice (see push_endpoint_files).
ENDPOINT_FILES = ("handler.py", "requirements.txt", "README.md", "TROUBLESHOOTING.md")
|
|
|
|
def push_weights() -> None:
    """Download the BASE checkpoint and mirror its tokenizer + weights to REPO.

    Requires an authenticated Hub session (``hf auth login`` or ``HF_TOKEN``).
    """
    # Load both artifacts first, then push in the same order (tokenizer,
    # then model) so a failed download never leaves a half-pushed repo.
    artifacts = (
        AutoTokenizer.from_pretrained(BASE),
        AutoModelForSeq2SeqLM.from_pretrained(BASE),
    )
    for artifact in artifacts:
        artifact.push_to_hub(REPO)
|
|
|
|
def push_endpoint_files() -> None:
    """Upload the Inference-Endpoints helper files that sit next to this script.

    Ensures REPO exists, then uploads each file in ENDPOINT_FILES as its own
    commit. Files missing locally are skipped with a notice instead of
    aborting the run.
    """
    here = Path(__file__).resolve().parent
    hub = HfApi()
    hub.create_repo(REPO, exist_ok=True)

    for filename in ENDPOINT_FILES:
        candidate = here / filename
        if not candidate.exists():
            print(f"[skip] {filename} not found next to model_loader.py")
            continue
        hub.upload_file(
            path_or_fileobj=str(candidate),
            path_in_repo=filename,
            repo_id=REPO,
            commit_message=f"Update {filename}",
        )
        print(f"[ok] uploaded {filename}")
|
|
|
|
if __name__ == "__main__":
    # Weights/tokenizer first, then the Inference-Endpoints helper files.
    for step in (push_weights, push_endpoint_files):
        step()
|
|