"""
publish_cleaned_to_hf.py — Upload a single cleaned.csv to a Hugging Face dataset repo.

- Ensures the dataset repo exists (public by default).
- Uploads the specified file (default: cleaned.csv) to the repo root or a given path.
- Optionally uploads a minimal README.md dataset card referencing cleaned.csv.

Authentication:
- Set env var `HF_TOKEN` or `HUGGINGFACE_TOKEN`, or pass `--token`.

Usage:
    python publish_cleaned_to_hf.py --file cleaned.csv --repo-id savedata101/repair_data
    python publish_cleaned_to_hf.py --file report/cleaned.csv --path-in-repo cleaned.csv
    python publish_cleaned_to_hf.py --add-readme --title "Repair Data (Cleaned)"
"""
|
|
from __future__ import annotations

import argparse
import os
import sys
from typing import Optional

try:
    from huggingface_hub import HfApi
except Exception:
    # Point the user at the missing dependency, then re-raise so the
    # original import error (and traceback) is preserved.
    print("Please install huggingface_hub: pip install huggingface_hub", file=sys.stderr)
    raise
|
|
|
|
def get_token(cli_token: Optional[str]) -> Optional[str]:
    """Resolve a Hugging Face access token.

    Precedence: a truthy CLI value wins, then the HF_TOKEN and
    HUGGINGFACE_TOKEN environment variables (in that order).
    Returns None when no token is available.
    """
    if cli_token:
        return cli_token
    env_values = (os.environ.get("HF_TOKEN"), os.environ.get("HUGGINGFACE_TOKEN"))
    return next((value for value in env_values if value), None)
|
|
|
|
def main(argv=None) -> int:
    """Create the target dataset repo (if needed), upload the CSV, and
    optionally publish a minimal README.md dataset card.

    Args:
        argv: CLI argument list (defaults to sys.argv[1:] via argparse).

    Returns:
        0 on success; 2 when the token is missing or the local file
        does not exist.
    """
    ap = argparse.ArgumentParser(description="Upload cleaned.csv to Hugging Face dataset repo")
    ap.add_argument("--repo-id", default="savedata101/repair_data", help="Target repo id")
    ap.add_argument("--file", default="cleaned.csv", help="Local CSV file to upload")
    ap.add_argument("--path-in-repo", default="", help="Destination path in repo (default root)")
    ap.add_argument("--token", default=None, help="Hugging Face access token")
    ap.add_argument("--private", action="store_true", help="Create as private repo (default public)")
    ap.add_argument("--add-readme", action="store_true", help="Upload a minimal README.md dataset card")
    ap.add_argument("--title", default="Repair Data — cleaned.csv", help="Dataset card title")
    args = ap.parse_args(argv)

    token = get_token(args.token)
    if not token:
        print("Missing token. Set HF_TOKEN or pass --token.", file=sys.stderr)
        return 2

    if not os.path.isfile(args.file):
        print(f"File not found: {args.file}", file=sys.stderr)
        return 2

    api = HfApi(token=token)

    # Idempotent: exist_ok=True makes re-runs against an existing repo a no-op.
    api.create_repo(repo_id=args.repo_id, repo_type="dataset", private=args.private, exist_ok=True)

    # Default destination is the local file's basename at the repo root.
    dest = args.path_in_repo if args.path_in_repo else os.path.basename(args.file)
    api.upload_file(
        path_or_fileobj=args.file,
        repo_id=args.repo_id,
        repo_type="dataset",
        path_in_repo=dest,
        commit_message=f"Upload {dest}",
    )

    if args.add_readme:
        # BUGFIX: the example previously used triple braces
        # (data_files={{{"train": "{dest}"}}}), which the f-string parser reads
        # as expression `"train"` with format spec `"{dest}"` — raising
        # ValueError: Invalid format specifier at runtime. Doubled braces emit
        # literal { } so the card shows data_files={"train": "<dest>"}.
        card = f"""---
license: other
tags:
- csv
pretty_name: {args.title}
---

# {args.title}

This dataset hosts a cleaned CSV file for UI preview and use with `datasets`.

## Files

- `{dest}` — main data file.

## Load with datasets

```python
from datasets import load_dataset
ds = load_dataset(
    "{args.repo_id}",
    data_files={{"train": "{dest}"}},
)
print(ds["train"])  # Dataset
```
"""
        api.upload_file(
            path_or_fileobj=card.encode("utf-8"),
            repo_id=args.repo_id,
            repo_type="dataset",
            path_in_repo="README.md",
            commit_message="Add dataset card",
        )

    url = f"https://huggingface.co/datasets/{args.repo_id}"
    print(f"Uploaded {dest}. View dataset: {url}")
    return 0
|
|
|
|
if __name__ == "__main__":
    # sys.exit raises SystemExit with main()'s return code — identical to
    # `raise SystemExit(main())`.
    sys.exit(main())
|
|
|
|