diff --git a/venv/lib/python3.10/site-packages/huggingface_hub/cli/__init__.py b/venv/lib/python3.10/site-packages/huggingface_hub/cli/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8568c82be1c638c0ccd34d460fd8b0f73dcbec4e
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/huggingface_hub/cli/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2025 The HuggingFace Team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/venv/lib/python3.10/site-packages/huggingface_hub/cli/_cli_utils.py b/venv/lib/python3.10/site-packages/huggingface_hub/cli/_cli_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0f8608739ad009f8327de362ab7e02a9245bb79
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/huggingface_hub/cli/_cli_utils.py
@@ -0,0 +1,513 @@
+# Copyright 2022 The HuggingFace Team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Contains CLI utilities (styling, helpers).""" + +import dataclasses +import datetime +import importlib.metadata +import json +import os +import re +import time +from enum import Enum +from pathlib import Path +from typing import TYPE_CHECKING, Annotated, Any, Callable, Literal, Optional, Sequence, Union, cast + +import click +import typer + +from huggingface_hub import __version__, constants +from huggingface_hub.utils import ANSI, get_session, hf_raise_for_status, installation_method, logging, tabulate + + +logger = logging.get_logger() + +# Arbitrary maximum length of a cell in a table output +_MAX_CELL_LENGTH = 35 + +if TYPE_CHECKING: + from huggingface_hub.hf_api import HfApi + + +def get_hf_api(token: Optional[str] = None) -> "HfApi": + # Import here to avoid circular import + from huggingface_hub.hf_api import HfApi + + return HfApi(token=token, library_name="huggingface-cli", library_version=__version__) + + +#### TYPER UTILS + +CLI_REFERENCE_URL = "https://huggingface.co/docs/huggingface_hub/en/guides/cli" + + +def generate_epilog(examples: list[str], docs_anchor: Optional[str] = None) -> str: + """Generate an epilog with examples and a Learn More section. + + Args: + examples: List of example commands (without the `$ ` prefix). + docs_anchor: Optional anchor for the docs URL (e.g., "#hf-download"). + + Returns: + Formatted epilog string. + """ + docs_url = f"{CLI_REFERENCE_URL}{docs_anchor}" if docs_anchor else CLI_REFERENCE_URL + examples_str = "\n".join(f" $ {ex}" for ex in examples) + return f"""\ +Examples +{examples_str} + +Learn more + Use `hf --help` for more information about a command. + Read the documentation at {docs_url} +""" + + +TOPIC_T = Union[Literal["main", "help"], str] + + +def _format_epilog_no_indent(epilog: Optional[str], ctx: click.Context, formatter: click.HelpFormatter) -> None: + """Write the epilog without indentation.""" + if epilog: + formatter.write_paragraph() + for line in epilog.split("\n"): + formatter.write_text(line) + + +class HFCliTyperGroup(typer.core.TyperGroup): + """ + Typer Group that: + - lists commands alphabetically within sections. + - separates commands by topic (main, help, etc.). + - formats epilog without extra indentation. 
+    """
+
+    def format_commands(self, ctx: click.Context, formatter: click.HelpFormatter) -> None:
+        topics: dict[str, list] = {}
+
+        for name in self.list_commands(ctx):
+            cmd = self.get_command(ctx, name)
+            if cmd is None or cmd.hidden:
+                continue
+            help_text = cmd.get_short_help_str(limit=formatter.width)
+            topic = getattr(cmd, "topic", "main")
+            topics.setdefault(topic, []).append((name, help_text))
+
+        with formatter.section("Main commands"):
+            formatter.write_dl(topics["main"])
+        for topic in sorted(topics.keys()):
+            if topic == "main":
+                continue
+            with formatter.section(f"{topic.capitalize()} commands"):
+                formatter.write_dl(topics[topic])
+
+    def format_epilog(self, ctx: click.Context, formatter: click.HelpFormatter) -> None:
+        # Collect examples from all commands
+        all_examples: list[str] = []
+        for name in self.list_commands(ctx):
+            cmd = self.get_command(ctx, name)
+            if cmd is None or cmd.hidden:
+                continue
+            cmd_examples = getattr(cmd, "examples", [])
+            all_examples.extend(cmd_examples)
+
+        if all_examples:
+            epilog = generate_epilog(all_examples)
+            _format_epilog_no_indent(epilog, ctx, formatter)
+        elif self.epilog:
+            _format_epilog_no_indent(self.epilog, ctx, formatter)
+
+    def list_commands(self, ctx: click.Context) -> list[str]:  # type: ignore[name-defined]
+        # click.Group stores both commands and subgroups in `self.commands`
+        return sorted(self.commands.keys())
+
+
+def HFCliCommand(topic: TOPIC_T, examples: Optional[list[str]] = None) -> type[typer.core.TyperCommand]:
+    def format_epilog(self: click.Command, ctx: click.Context, formatter: click.HelpFormatter) -> None:
+        _format_epilog_no_indent(self.epilog, ctx, formatter)
+
+    return type(
+        f"TyperCommand{topic.capitalize()}",
+        (typer.core.TyperCommand,),
+        {"topic": topic, "examples": examples or [], "format_epilog": format_epilog},
+    )
+
+
+class HFCliApp(typer.Typer):
+    """Custom Typer app for Hugging Face CLI."""
+
+    def command(  # type: ignore[override]
+        self,
+        name: Optional[str] = None,
+        *,
+        topic: TOPIC_T = "main",
+        examples: Optional[list[str]] = None,
+        context_settings: Optional[dict[str, Any]] = None,
+        help: Optional[str] = None,
+        epilog: Optional[str] = None,
+        short_help: Optional[str] = None,
+        options_metavar: str = "[OPTIONS]",
+        add_help_option: bool = True,
+        no_args_is_help: bool = False,
+        hidden: bool = False,
+        deprecated: bool = False,
+        rich_help_panel: Optional[str] = None,
+    ) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
+        # Generate epilog from examples if not explicitly provided
+        if epilog is None and examples:
+            epilog = generate_epilog(examples)
+
+        def _inner(func: Callable[..., Any]) -> Callable[..., Any]:
+            return super(HFCliApp, self).command(
+                name,
+                cls=HFCliCommand(topic, examples),
+                context_settings=context_settings,
+                help=help,
+                epilog=epilog,
+                short_help=short_help,
+                options_metavar=options_metavar,
+                add_help_option=add_help_option,
+                no_args_is_help=no_args_is_help,
+                hidden=hidden,
+                deprecated=deprecated,
+                rich_help_panel=rich_help_panel,
+            )(func)
+
+        return _inner
+
+
+def typer_factory(help: str, epilog: Optional[str] = None) -> "HFCliApp":
+    """Create a Typer app with consistent settings.
+
+    Args:
+        help: Help text for the app.
+        epilog: Optional epilog text (use `generate_epilog` to create one).
+
+    Returns:
+        A configured Typer app.
+    """
+    return HFCliApp(
+        help=help,
+        epilog=epilog,
+        add_completion=True,
+        no_args_is_help=True,
+        cls=HFCliTyperGroup,
+        # Disable rich completely for consistent experience
+        rich_markup_mode=None,
+        rich_help_panel=None,
+        pretty_exceptions_enable=False,
+        # Increase max content width for better readability
+        context_settings={
+            "max_content_width": 120,
+        },
+    )
+
+
+class RepoType(str, Enum):
+    model = "model"
+    dataset = "dataset"
+    space = "space"
+
+
+RepoIdArg = Annotated[
+    str,
+    typer.Argument(
+        help="The ID of the repo (e.g. `username/repo-name`).",
+    ),
+]
+
+
+RepoTypeOpt = Annotated[
+    RepoType,
+    typer.Option(
+        help="The type of repository (model, dataset, or space).",
+    ),
+]
+
+TokenOpt = Annotated[
+    Optional[str],
+    typer.Option(
+        help="A User Access Token generated from https://huggingface.co/settings/tokens.",
+    ),
+]
+
+PrivateOpt = Annotated[
+    Optional[bool],
+    typer.Option(
+        help="Whether to create a private repo if repo doesn't exist on the Hub. Ignored if the repo already exists.",
+    ),
+]
+
+RevisionOpt = Annotated[
+    Optional[str],
+    typer.Option(
+        help="Git revision id which can be a branch name, a tag, or a commit hash.",
+    ),
+]
+
+
+LimitOpt = Annotated[
+    int,
+    typer.Option(help="Limit the number of results."),
+]
+
+AuthorOpt = Annotated[
+    Optional[str],
+    typer.Option(help="Filter by author or organization."),
+]
+
+FilterOpt = Annotated[
+    Optional[list[str]],
+    typer.Option(help="Filter by tags (e.g. 'text-classification'). Can be used multiple times."),
+]
+
+SearchOpt = Annotated[
+    Optional[str],
+    typer.Option(help="Search query."),
+]
+
+
+class OutputFormat(str, Enum):
+    """Output format for CLI list commands."""
+
+    table = "table"
+    json = "json"
+
+
+FormatOpt = Annotated[
+    OutputFormat,
+    typer.Option(
+        help="Output format (table or json).",
+    ),
+]
+
+QuietOpt = Annotated[
+    bool,
+    typer.Option(
+        "-q",
+        "--quiet",
+        help="Print only IDs (one per line).",
+    ),
+]
+
+
+def _to_header(name: str) -> str:
+    """Convert a camelCase or PascalCase string to SCREAMING_SNAKE_CASE to be used as table header."""
+    s = re.sub(r"([a-z])([A-Z])", r"\1_\2", name)
+    return s.upper()
+
+
+def _format_value(value: Any) -> str:
+    """Convert a value to string for terminal display."""
+    if not value:
+        return ""
+    if isinstance(value, bool):
+        return "✔" if value else ""
+    if isinstance(value, datetime.datetime):
+        return value.strftime("%Y-%m-%d")
+    if isinstance(value, str) and re.match(r"^\d{4}-\d{2}-\d{2}T", value):
+        return value[:10]
+    if isinstance(value, list):
+        return ", ".join(_format_value(v) for v in value)
+    elif isinstance(value, dict):
+        if "name" in value:  # Likely to be a user or org => print name
+            return str(value["name"])
+        # TODO: extend if needed
+        return json.dumps(value)
+    return str(value)
+
+
+def _format_cell(value: Any, max_len: int = _MAX_CELL_LENGTH) -> str:
+    """Format a value + truncate it for table display."""
+    cell = _format_value(value)
+    if len(cell) > max_len:
+        cell = cell[: max_len - 3] + "..."
+    return cell
+
+
+def print_as_table(
+    items: Sequence[dict[str, Any]],
+    headers: list[str],
+    row_fn: Callable[[dict[str, Any]], list[str]],
+) -> None:
+    """Print items as a formatted table.
+
+    Args:
+        items: Sequence of dictionaries representing the items to display.
+        headers: List of column headers.
+        row_fn: Function that takes an item dict and returns a list of string values for each column.
+    """
+    if not items:
+        print("No results found.")
+        return
+    rows = cast(list[list[Union[str, int]]], [row_fn(item) for item in items])
+    print(tabulate(rows, headers=[_to_header(h) for h in headers]))
+
+
+def print_list_output(
+    items: Sequence[dict[str, Any]],
+    format: OutputFormat,
+    quiet: bool,
+    id_key: str = "id",
+    headers: Optional[list[str]] = None,
+    row_fn: Optional[Callable[[dict[str, Any]], list[str]]] = None,
+) -> None:
+    """Print list command output in the specified format.
+
+    Args:
+        items: Sequence of dictionaries representing the items to display.
+        format: Output format (table or json).
+        quiet: If True, print only IDs (one per line).
+        id_key: Key to use for extracting IDs in quiet mode.
+        headers: Optional list of column names for headers. If not provided, auto-detected from keys.
+        row_fn: Optional function to extract row values. If not provided, uses _format_cell on each column.
+    """
+    if quiet:
+        for item in items:
+            print(item[id_key])
+        return
+
+    if format == OutputFormat.json:
+        print(json.dumps(list(items), indent=2))
+        return
+
+    if headers is None:
+        all_columns = list(items[0].keys()) if items else [id_key]
+        headers = [col for col in all_columns if any(_format_cell(item.get(col)) for item in items)]
+
+    if row_fn is None:
+
+        def row_fn(item: dict[str, Any]) -> list[str]:
+            return [_format_cell(item.get(col)) for col in headers]  # type: ignore[union-attr]
+
+    print_as_table(items, headers=headers, row_fn=row_fn)
+
+
+def _serialize_value(v: object) -> object:
+    """Recursively serialize a value to be JSON-compatible."""
+    if isinstance(v, datetime.datetime):
+        return v.isoformat()
+    elif isinstance(v, dict):
+        return {key: _serialize_value(val) for key, val in v.items() if val is not None}
+    elif isinstance(v, list):
+        return [_serialize_value(item) for item in v]
+    return v
+
+
+def api_object_to_dict(info: Any) -> dict[str, Any]:
+    """Convert repo info dataclasses to json-serializable dicts."""
+    return {k: _serialize_value(v) for k, v in dataclasses.asdict(info).items() if v is not None}
+
+
+def make_expand_properties_parser(valid_properties: list[str]):
+    """Create a callback to parse and validate comma-separated expand properties."""
+
+    def _parse_expand_properties(value: Optional[str]) -> Optional[list[str]]:
+        if value is None:
+            return None
+        properties = [p.strip() for p in value.split(",")]
+        for prop in properties:
+            if prop not in valid_properties:
+                raise typer.BadParameter(
+                    f"Invalid expand property: '{prop}'. Valid values are: {', '.join(valid_properties)}"
+                )
+        return properties
+
+    return _parse_expand_properties
+
+
+### PyPI VERSION CHECKER
+
+
+def check_cli_update(library: Literal["huggingface_hub", "transformers"]) -> None:
+    """
+    Check whether a newer version of a library is available on PyPI.
+
+    If a newer version is found, notify the user and suggest updating.
+    If current version is a pre-release (e.g. `1.0.0.rc1`), or a dev version (e.g. `1.0.0.dev1`), no check is performed.
+
+    This function is called at the entry point of the CLI. It only performs the check once every 24 hours, and any error
+    during the check is caught and logged, to avoid breaking the CLI.
+
+    Args:
+        library: The library to check for updates. Currently supports "huggingface_hub" and "transformers".
+    """
+    try:
+        _check_cli_update(library)
+    except Exception:
+        # We don't want the CLI to fail on version checks, no matter the reason.
+        logger.debug("Error while checking for CLI update.", exc_info=True)
+
+
+def _check_cli_update(library: Literal["huggingface_hub", "transformers"]) -> None:
+    current_version = importlib.metadata.version(library)
+
+    # Skip if current version is a pre-release or dev version
+    if any(tag in current_version for tag in ["rc", "dev"]):
+        return
+
+    # Skip if already checked in the last 24 hours
+    if os.path.exists(constants.CHECK_FOR_UPDATE_DONE_PATH):
+        mtime = os.path.getmtime(constants.CHECK_FOR_UPDATE_DONE_PATH)
+        if (time.time() - mtime) < 24 * 3600:
+            return
+
+    # Touch the file to mark that we did the check now
+    Path(constants.CHECK_FOR_UPDATE_DONE_PATH).parent.mkdir(parents=True, exist_ok=True)
+    Path(constants.CHECK_FOR_UPDATE_DONE_PATH).touch()
+
+    # Check latest version from PyPI
+    response = get_session().get(f"https://pypi.org/pypi/{library}/json", timeout=2)
+    hf_raise_for_status(response)
+    data = response.json()
+    latest_version = data["info"]["version"]
+
+    # If latest version is different from current, notify user
+    if current_version != latest_version:
+        if library == "huggingface_hub":
+            update_command = _get_huggingface_hub_update_command()
+        else:
+            update_command = _get_transformers_update_command()
+
+        click.echo(
+            ANSI.yellow(
+                f"A new version of {library} ({latest_version}) is available! "
+                f"You are using version {current_version}.\n"
+                f"To update, run: {ANSI.bold(update_command)}\n",
+            )
+        )
+
+
+def _get_huggingface_hub_update_command() -> str:
+    """Return the command to update huggingface_hub."""
+    method = installation_method()
+    if method == "brew":
+        return "brew upgrade huggingface-cli"
+    elif method == "hf_installer" and os.name == "nt":
+        return 'powershell -NoProfile -Command "iwr -useb https://hf.co/cli/install.ps1 | iex"'
+    elif method == "hf_installer":
+        return "curl -LsSf https://hf.co/cli/install.sh | bash -"
+    else:  # unknown => likely pip
+        return "pip install -U huggingface_hub"
+
+
+def _get_transformers_update_command() -> str:
+    """Return the command to update transformers."""
+    method = installation_method()
+    if method == "hf_installer" and os.name == "nt":
+        return 'powershell -NoProfile -Command "iwr -useb https://hf.co/cli/install.ps1 | iex" -WithTransformers'
+    elif method == "hf_installer":
+        return "curl -LsSf https://hf.co/cli/install.sh | bash -s -- --with-transformers"
+    else:  # brew/unknown => likely pip
+        return "pip install -U transformers"
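For orientation, a minimal usage sketch of the list-output helpers above (not part of the diff; the sample `items` are invented, and the import path assumes the module location shown in this patch):

    from huggingface_hub.cli._cli_utils import OutputFormat, print_list_output

    # Invented sample rows; real callers pass dicts built from Hub API responses.
    items = [
        {"id": "gpt2", "downloads": 1000000, "likes": 42},
        {"id": "bert-base-uncased", "downloads": 2000000, "likes": 7},
    ]

    # Table mode: headers are auto-detected from the dict keys, all-empty
    # columns are dropped, and each cell is truncated to _MAX_CELL_LENGTH.
    print_list_output(items, format=OutputFormat.table, quiet=False)

    # Quiet mode prints one ID per line, convenient for shell pipelines.
    print_list_output(items, format=OutputFormat.table, quiet=True)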
+"""CLI error handling utilities.""" + +from typing import Callable, Optional + +from huggingface_hub.errors import ( + GatedRepoError, + HfHubHTTPError, + LocalTokenNotFoundError, + RemoteEntryNotFoundError, + RepositoryNotFoundError, + RevisionNotFoundError, +) + + +CLI_ERROR_MAPPINGS: dict[type[Exception], Callable[[Exception], str]] = { + RepositoryNotFoundError: lambda e: ( + "Repository not found. Check the `repo_id` and `repo_type` parameters. If the repo is private, make sure you are authenticated." + ), + RevisionNotFoundError: lambda e: "Revision not found. Check the `revision` parameter.", + GatedRepoError: lambda e: "Access denied. This repository requires approval.", + LocalTokenNotFoundError: lambda e: "Not logged in. Run 'hf auth login' first.", + RemoteEntryNotFoundError: lambda e: "File not found in repository.", + HfHubHTTPError: lambda e: str(e), +} + + +def format_known_exception(e: Exception) -> Optional[str]: + for exc_type, formatter in CLI_ERROR_MAPPINGS.items(): + if isinstance(e, exc_type): + return formatter(e) + return None diff --git a/venv/lib/python3.10/site-packages/huggingface_hub/cli/auth.py b/venv/lib/python3.10/site-packages/huggingface_hub/cli/auth.py new file mode 100644 index 0000000000000000000000000000000000000000..9cf595db389dbc0d6b5f6ad9c7c3837c805ddcc0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/huggingface_hub/cli/auth.py @@ -0,0 +1,157 @@ +# Copyright 2020 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Contains commands to authenticate to the Hugging Face Hub and interact with your repositories. + +Usage: + # login and save token locally. + hf auth login --token=hf_*** --add-to-git-credential + + # switch between tokens + hf auth switch + + # list all tokens + hf auth list + + # logout from all tokens + hf auth logout + + # check which account you are logged in as + hf auth whoami +""" + +from typing import Annotated, Optional + +import typer + +from huggingface_hub.constants import ENDPOINT +from huggingface_hub.hf_api import whoami + +from .._login import auth_list, auth_switch, login, logout +from ..utils import ANSI, get_stored_tokens, get_token, logging +from ._cli_utils import TokenOpt, typer_factory + + +logger = logging.get_logger(__name__) + + +auth_cli = typer_factory(help="Manage authentication (login, logout, etc.).") + + +@auth_cli.command( + "login", + examples=[ + "hf auth login", + "hf auth login --token $HF_TOKEN", + "hf auth login --token $HF_TOKEN --add-to-git-credential", + ], +) +def auth_login( + token: TokenOpt = None, + add_to_git_credential: Annotated[ + bool, + typer.Option( + help="Save to git credential helper. 
diff --git a/venv/lib/python3.10/site-packages/huggingface_hub/cli/auth.py b/venv/lib/python3.10/site-packages/huggingface_hub/cli/auth.py
new file mode 100644
index 0000000000000000000000000000000000000000..9cf595db389dbc0d6b5f6ad9c7c3837c805ddcc0
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/huggingface_hub/cli/auth.py
@@ -0,0 +1,157 @@
+# Copyright 2020 The HuggingFace Team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Contains commands to authenticate to the Hugging Face Hub and interact with your repositories.
+
+Usage:
+    # login and save token locally.
+    hf auth login --token=hf_*** --add-to-git-credential
+
+    # switch between tokens
+    hf auth switch
+
+    # list all tokens
+    hf auth list
+
+    # logout from all tokens
+    hf auth logout
+
+    # check which account you are logged in as
+    hf auth whoami
+"""
+
+from typing import Annotated, Optional
+
+import typer
+
+from huggingface_hub.constants import ENDPOINT
+from huggingface_hub.hf_api import whoami
+
+from .._login import auth_list, auth_switch, login, logout
+from ..utils import ANSI, get_stored_tokens, get_token, logging
+from ._cli_utils import TokenOpt, typer_factory
+
+
+logger = logging.get_logger(__name__)
+
+
+auth_cli = typer_factory(help="Manage authentication (login, logout, etc.).")
+
+
+@auth_cli.command(
+    "login",
+    examples=[
+        "hf auth login",
+        "hf auth login --token $HF_TOKEN",
+        "hf auth login --token $HF_TOKEN --add-to-git-credential",
+    ],
+)
+def auth_login(
+    token: TokenOpt = None,
+    add_to_git_credential: Annotated[
+        bool,
+        typer.Option(
+            help="Save to git credential helper. Useful only if you plan to run git commands directly.",
+        ),
+    ] = False,
+) -> None:
+    """Login using a token from huggingface.co/settings/tokens."""
+    login(token=token, add_to_git_credential=add_to_git_credential)
+
+
+@auth_cli.command(
+    "logout",
+    examples=["hf auth logout", "hf auth logout --token-name my-token"],
+)
+def auth_logout(
+    token_name: Annotated[
+        Optional[str],
+        typer.Option(help="Name of token to logout"),
+    ] = None,
+) -> None:
+    """Logout from a specific token."""
+    logout(token_name=token_name)
+
+
+def _select_token_name() -> Optional[str]:
+    token_names = list(get_stored_tokens().keys())
+
+    if not token_names:
+        logger.error("No stored tokens found. Please login first.")
+        return None
+
+    print("Available stored tokens:")
+    for i, token_name in enumerate(token_names, 1):
+        print(f"{i}. {token_name}")
+    while True:
+        try:
+            choice = input("Enter the number of the token to switch to (or 'q' to quit): ")
+            if choice.lower() == "q":
+                return None
+            index = int(choice) - 1
+            if 0 <= index < len(token_names):
+                return token_names[index]
+            else:
+                print("Invalid selection. Please try again.")
+        except ValueError:
+            print("Invalid input. Please enter a number or 'q' to quit.")
+
+
+@auth_cli.command(
+    "switch",
+    examples=["hf auth switch", "hf auth switch --token-name my-token"],
+)
+def auth_switch_cmd(
+    token_name: Annotated[
+        Optional[str],
+        typer.Option(
+            help="Name of the token to switch to",
+        ),
+    ] = None,
+    add_to_git_credential: Annotated[
+        bool,
+        typer.Option(
+            help="Save to git credential helper. Useful only if you plan to run git commands directly.",
+        ),
+    ] = False,
+) -> None:
+    """Switch between access tokens."""
+    if token_name is None:
+        token_name = _select_token_name()
+
+    if token_name is None:
+        print("No token name provided. Aborting.")
+        raise typer.Exit()
+    auth_switch(token_name, add_to_git_credential=add_to_git_credential)
+
+
+@auth_cli.command("list", examples=["hf auth list"])
+def auth_list_cmd() -> None:
+    """List all stored access tokens."""
+    auth_list()
+
+
+@auth_cli.command("whoami", examples=["hf auth whoami"])
+def auth_whoami() -> None:
+    """Find out which huggingface.co account you are logged in as."""
+    token = get_token()
+    if token is None:
+        print("Not logged in")
+        raise typer.Exit()
+    info = whoami(token)
+    print(ANSI.bold("user: "), info["name"])
+    orgs = [org["name"] for org in info["orgs"]]
+    if orgs:
+        print(ANSI.bold("orgs: "), ",".join(orgs))
+
+    if ENDPOINT != "https://huggingface.co":
+        print(f"Authenticated through private endpoint: {ENDPOINT}")
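These commands can be exercised without a live terminal session via Typer's test runner; a hedged sketch (assuming the import path from this patch and no stored token):

    from typer.testing import CliRunner

    from huggingface_hub.cli.auth import auth_cli

    runner = CliRunner()
    result = runner.invoke(auth_cli, ["whoami"])
    # With no token stored, `auth_whoami` prints "Not logged in" and exits.
    print(result.exit_code, result.output)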
+"""Contains the 'hf cache' command group with cache management subcommands.""" + +import json +import re +import sys +import time +from collections import defaultdict +from dataclasses import dataclass +from enum import Enum +from typing import Annotated, Any, Callable, Dict, List, Mapping, Optional, Tuple + +import typer + +from huggingface_hub.errors import CLIError + +from ..utils import ( + ANSI, + CachedRepoInfo, + CachedRevisionInfo, + CacheNotFound, + HFCacheInfo, + _format_size, + scan_cache_dir, + tabulate, +) +from ..utils._parsing import parse_duration, parse_size +from ._cli_utils import ( + OutputFormat, + RepoIdArg, + RepoTypeOpt, + RevisionOpt, + TokenOpt, + get_hf_api, + typer_factory, +) + + +cache_cli = typer_factory(help="Manage local cache directory.") + + +#### Cache helper utilities + + +@dataclass(frozen=True) +class _DeletionResolution: + revisions: frozenset[str] + selected: dict[CachedRepoInfo, frozenset[CachedRevisionInfo]] + missing: tuple[str, ...] + + +_FILTER_PATTERN = re.compile(r"^(?P[a-zA-Z_]+)\s*(?P==|!=|>=|<=|>|<|=)\s*(?P.+)$") +_ALLOWED_OPERATORS = {"=", "!=", ">", "<", ">=", "<="} +_FILTER_KEYS = {"accessed", "modified", "refs", "size", "type"} +_SORT_KEYS = {"accessed", "modified", "name", "size"} +_SORT_PATTERN = re.compile(r"^(?P[a-zA-Z_]+)(?::(?Pasc|desc))?$") +_SORT_DEFAULT_ORDER = { + # Default ordering: accessed/modified/size are descending (newest/biggest first), name is ascending + "accessed": "desc", + "modified": "desc", + "size": "desc", + "name": "asc", +} + + +# Dynamically generate SortOptions enum from _SORT_KEYS +_sort_options_dict = {} +for key in sorted(_SORT_KEYS): + _sort_options_dict[key] = key + _sort_options_dict[f"{key}_asc"] = f"{key}:asc" + _sort_options_dict[f"{key}_desc"] = f"{key}:desc" + +SortOptions = Enum("SortOptions", _sort_options_dict, type=str, module=__name__) # type: ignore + + +@dataclass(frozen=True) +class CacheDeletionCounts: + """Simple counters summarizing cache deletions for CLI messaging.""" + + repo_count: int + partial_revision_count: int + total_revision_count: int + + +CacheEntry = Tuple[CachedRepoInfo, Optional[CachedRevisionInfo]] +RepoRefsMap = Dict[CachedRepoInfo, frozenset[str]] + + +def summarize_deletions( + selected_by_repo: Mapping[CachedRepoInfo, frozenset[CachedRevisionInfo]], +) -> CacheDeletionCounts: + """Summarize deletions across repositories.""" + repo_count = 0 + total_revisions = 0 + revisions_in_full_repos = 0 + + for repo, revisions in selected_by_repo.items(): + total_revisions += len(revisions) + if len(revisions) == len(repo.revisions): + repo_count += 1 + revisions_in_full_repos += len(revisions) + + partial_revision_count = total_revisions - revisions_in_full_repos + return CacheDeletionCounts(repo_count, partial_revision_count, total_revisions) + + +def print_cache_selected_revisions(selected_by_repo: Mapping[CachedRepoInfo, frozenset[CachedRevisionInfo]]) -> None: + """Pretty-print selected cache revisions during confirmation prompts.""" + for repo in sorted(selected_by_repo.keys(), key=lambda repo: (repo.repo_type, repo.repo_id.lower())): + repo_key = f"{repo.repo_type}/{repo.repo_id}" + revisions = sorted(selected_by_repo[repo], key=lambda rev: rev.commit_hash) + if len(revisions) == len(repo.revisions): + print(f" - {repo_key} (entire repo)") + continue + + print(f" - {repo_key}:") + for revision in revisions: + refs = " ".join(sorted(revision.refs)) or "(detached)" + print(f" {revision.commit_hash} [{refs}] {revision.size_on_disk_str}") + + +def build_cache_index( + 
+    hf_cache_info: HFCacheInfo,
+) -> Tuple[
+    Dict[str, CachedRepoInfo],
+    Dict[str, Tuple[CachedRepoInfo, CachedRevisionInfo]],
+]:
+    """Create lookup tables so CLI commands can resolve repo ids and revisions quickly."""
+    repo_lookup: dict[str, CachedRepoInfo] = {}
+    revision_lookup: dict[str, tuple[CachedRepoInfo, CachedRevisionInfo]] = {}
+    for repo in hf_cache_info.repos:
+        repo_key = repo.cache_id.lower()
+        repo_lookup[repo_key] = repo
+        for revision in repo.revisions:
+            revision_lookup[revision.commit_hash.lower()] = (repo, revision)
+    return repo_lookup, revision_lookup
+
+
+def collect_cache_entries(
+    hf_cache_info: HFCacheInfo, *, include_revisions: bool
+) -> Tuple[List[CacheEntry], RepoRefsMap]:
+    """Flatten cache metadata into rows consumed by `hf cache ls`."""
+    entries: List[CacheEntry] = []
+    repo_refs_map: RepoRefsMap = {}
+    sorted_repos = sorted(hf_cache_info.repos, key=lambda repo: (repo.repo_type, repo.repo_id.lower()))
+    for repo in sorted_repos:
+        repo_refs_map[repo] = frozenset({ref for revision in repo.revisions for ref in revision.refs})
+        if include_revisions:
+            for revision in sorted(repo.revisions, key=lambda rev: rev.commit_hash):
+                entries.append((repo, revision))
+        else:
+            entries.append((repo, None))
+    if include_revisions:
+        entries.sort(
+            key=lambda entry: (
+                entry[0].cache_id,
+                entry[1].commit_hash if entry[1] is not None else "",
+            )
+        )
+    else:
+        entries.sort(key=lambda entry: entry[0].cache_id)
+    return entries, repo_refs_map
+
+
+def compile_cache_filter(
+    expr: str, repo_refs_map: RepoRefsMap
+) -> Callable[[CachedRepoInfo, Optional[CachedRevisionInfo], float], bool]:
+    """Convert a `hf cache ls` filter expression into the yes/no test we apply to each cache entry before displaying it."""
+    match = _FILTER_PATTERN.match(expr.strip())
+    if not match:
+        raise ValueError(f"Invalid filter expression: '{expr}'.")
+
+    key = match.group("key").lower()
+    op = match.group("op")
+    value_raw = match.group("value").strip()
+
+    if op not in _ALLOWED_OPERATORS:
+        raise ValueError(f"Unsupported operator '{op}' in filter '{expr}'. Must be one of {list(_ALLOWED_OPERATORS)}.")
+
+    if key not in _FILTER_KEYS:
+        raise ValueError(f"Unsupported filter key '{key}' in '{expr}'. Must be one of {list(_FILTER_KEYS)}.")
+    # at this point we know that key is in `_FILTER_KEYS`
+    if key == "size":
+        size_threshold = parse_size(value_raw)
+        return lambda repo, revision, _: _compare_numeric(
+            revision.size_on_disk if revision is not None else repo.size_on_disk,
+            op,
+            size_threshold,
+        )
+
+    if key in {"modified", "accessed"}:
+        seconds = parse_duration(value_raw.strip())
+
+        def _time_filter(repo: CachedRepoInfo, revision: Optional[CachedRevisionInfo], now: float) -> bool:
+            timestamp = (
+                repo.last_accessed
+                if key == "accessed"
+                else revision.last_modified
+                if revision is not None
+                else repo.last_modified
+            )
+            if timestamp is None:
+                return False
+            return _compare_numeric(now - timestamp, op, seconds)
+
+        return _time_filter
+
+    if key == "type":
+        expected = value_raw.lower()
+
+        if op != "=":
+            raise ValueError(f"Only '=' is supported for 'type' filters. Got '{op}'.")
+
+        def _type_filter(repo: CachedRepoInfo, revision: Optional[CachedRevisionInfo], _: float) -> bool:
+            return repo.repo_type.lower() == expected
+
+        return _type_filter
+
+    else:  # key == "refs"
+        if op != "=":
+            raise ValueError(f"Only '=' is supported for 'refs' filters. Got {op}.")
+
+        def _refs_filter(repo: CachedRepoInfo, revision: Optional[CachedRevisionInfo], _: float) -> bool:
+            refs = revision.refs if revision is not None else repo_refs_map.get(repo, frozenset())
+            return value_raw.lower() in [ref.lower() for ref in refs]
+
+        return _refs_filter
+
+
+def _build_cache_export_payload(
+    entries: List[CacheEntry], *, include_revisions: bool, repo_refs_map: RepoRefsMap
+) -> List[Dict[str, Any]]:
+    """Normalize cache entries into serializable records for JSON/CSV exports."""
+    payload: List[Dict[str, Any]] = []
+    for repo, revision in entries:
+        if include_revisions:
+            if revision is None:
+                continue
+            record: Dict[str, Any] = {
+                "repo_id": repo.repo_id,
+                "repo_type": repo.repo_type,
+                "revision": revision.commit_hash,
+                "snapshot_path": str(revision.snapshot_path),
+                "size_on_disk": revision.size_on_disk,
+                "last_accessed": repo.last_accessed,
+                "last_modified": revision.last_modified,
+                "refs": sorted(revision.refs),
+            }
+        else:
+            record = {
+                "repo_id": repo.repo_id,
+                "repo_type": repo.repo_type,
+                "size_on_disk": repo.size_on_disk,
+                "last_accessed": repo.last_accessed,
+                "last_modified": repo.last_modified,
+                "refs": sorted(repo_refs_map.get(repo, frozenset())),
+            }
+        payload.append(record)
+    return payload
+
+
+def print_cache_entries_table(
+    entries: List[CacheEntry], *, include_revisions: bool, repo_refs_map: RepoRefsMap
+) -> None:
+    """Render cache entries as a table and show a human-readable summary."""
+    if not entries:
+        message = "No cached revisions found." if include_revisions else "No cached repositories found."
+        print(message)
+        return
+    table_rows: List[List[str]]
+    if include_revisions:
+        headers = ["ID", "REVISION", "SIZE", "LAST_MODIFIED", "REFS"]
+        table_rows = [
+            [
+                repo.cache_id,
+                revision.commit_hash,
+                revision.size_on_disk_str.rjust(8),
+                revision.last_modified_str,
+                " ".join(sorted(revision.refs)),
+            ]
+            for repo, revision in entries
+            if revision is not None
+        ]
+    else:
+        headers = ["ID", "SIZE", "LAST_ACCESSED", "LAST_MODIFIED", "REFS"]
+        table_rows = [
+            [
+                repo.cache_id,
+                repo.size_on_disk_str.rjust(8),
+                repo.last_accessed_str or "",
+                repo.last_modified_str,
+                " ".join(sorted(repo_refs_map.get(repo, frozenset()))),
+            ]
+            for repo, _ in entries
+        ]
+
+    print(tabulate(table_rows, headers=headers))  # type: ignore[arg-type]
+
+    unique_repos = {repo for repo, _ in entries}
+    repo_count = len(unique_repos)
+    if include_revisions:
+        revision_count = sum(1 for _, revision in entries if revision is not None)
+        total_size = sum(revision.size_on_disk for _, revision in entries if revision is not None)
+    else:
+        revision_count = sum(len(repo.revisions) for repo in unique_repos)
+        total_size = sum(repo.size_on_disk for repo in unique_repos)
+
+    summary = f"\nFound {repo_count} repo(s) for a total of {revision_count} revision(s) and {_format_size(total_size)} on disk."
+    print(ANSI.bold(summary))
+
+
+def print_cache_entries_json(
+    entries: List[CacheEntry], *, include_revisions: bool, repo_refs_map: RepoRefsMap
+) -> None:
+    """Dump cache entries as JSON for scripting or automation."""
+    payload = _build_cache_export_payload(entries, include_revisions=include_revisions, repo_refs_map=repo_refs_map)
+    json.dump(payload, sys.stdout, indent=2)
+    sys.stdout.write("\n")
+
+
+def _compare_numeric(left: Optional[float], op: str, right: float) -> bool:
+    """Evaluate numeric comparisons for filters."""
+    if left is None:
+        return False
+
+    comparisons = {
+        "=": left == right,
+        "!=": left != right,
+        ">": left > right,
+        "<": left < right,
+        ">=": left >= right,
+        "<=": left <= right,
+    }
+
+    if op not in comparisons:
+        raise ValueError(f"Unsupported numeric comparison operator: {op}")
+
+    return comparisons[op]
+
+
+def compile_cache_sort(sort_expr: str) -> tuple[Callable[[CacheEntry], tuple[Any, ...]], bool]:
+    """Convert a `hf cache ls` sort expression into a key function for sorting entries.
+
+    Returns:
+        A tuple of (key_function, reverse_flag) where reverse_flag indicates whether
+        to sort in descending order (True) or ascending order (False).
+    """
+    match = _SORT_PATTERN.match(sort_expr.strip().lower())
+    if not match:
+        raise ValueError(f"Invalid sort expression: '{sort_expr}'. Expected format: 'key' or 'key:asc' or 'key:desc'.")
+
+    key = match.group("key").lower()
+    explicit_order = match.group("order")
+
+    if key not in _SORT_KEYS:
+        raise ValueError(f"Unsupported sort key '{key}' in '{sort_expr}'. Must be one of {list(_SORT_KEYS)}.")
+
+    # Use explicit order if provided, otherwise use default for the key
+    order = explicit_order if explicit_order else _SORT_DEFAULT_ORDER[key]
+    reverse = order == "desc"
+
+    def _sort_key(entry: CacheEntry) -> tuple[Any, ...]:
+        repo, revision = entry
+
+        if key == "name":
+            # Sort by cache_id (repo type/id)
+            value: Any = repo.cache_id.lower()
+            return (value,)
+
+        if key == "size":
+            # Use revision size if available, otherwise repo size
+            value = revision.size_on_disk if revision is not None else repo.size_on_disk
+            return (value,)
+
+        if key == "accessed":
+            # For revisions, accessed is not available per-revision, use repo's last_accessed
+            # For repos, use repo's last_accessed
+            value = repo.last_accessed if repo.last_accessed is not None else 0.0
+            return (value,)
+
+        if key == "modified":
+            # Use revision's last_modified if available, otherwise repo's last_modified
+            if revision is not None:
+                value = revision.last_modified if revision.last_modified is not None else 0.0
+            else:
+                value = repo.last_modified if repo.last_modified is not None else 0.0
+            return (value,)
+
+        # Should never reach here due to validation above
+        raise ValueError(f"Unsupported sort key: {key}")
+
+    return _sort_key, reverse
+
+
+def _resolve_deletion_targets(hf_cache_info: HFCacheInfo, targets: list[str]) -> _DeletionResolution:
+    """Resolve the deletion targets into a deletion resolution."""
+    repo_lookup, revision_lookup = build_cache_index(hf_cache_info)
+
+    selected: dict[CachedRepoInfo, set[CachedRevisionInfo]] = defaultdict(set)
+    revisions: set[str] = set()
+    missing: list[str] = []
+
+    for raw_target in targets:
+        target = raw_target.strip()
+        if not target:
+            continue
+        lowered = target.lower()
+
+        if re.fullmatch(r"[0-9a-fA-F]{40}", lowered):
+            match = revision_lookup.get(lowered)
+            if match is None:
+                missing.append(raw_target)
+                continue
+            repo, revision = match
+            selected[repo].add(revision)
+            revisions.add(revision.commit_hash)
+            continue
+
+        matched_repo = repo_lookup.get(lowered)
+        if matched_repo is None:
+            missing.append(raw_target)
+            continue
+
+        for revision in matched_repo.revisions:
+            selected[matched_repo].add(revision)
+            revisions.add(revision.commit_hash)
+
+    frozen_selected = {repo: frozenset(revs) for repo, revs in selected.items()}
+    return _DeletionResolution(
+        revisions=frozenset(revisions),
+        selected=frozen_selected,
+        missing=tuple(missing),
+    )
+
+
+#### Cache CLI commands
+
+
+@cache_cli.command(
+    examples=[
+        "hf cache ls",
+        "hf cache ls --revisions",
+        'hf cache ls --filter "size>1GB" --limit 20',
+        "hf cache ls --format json",
+    ],
+)
+def ls(
+    cache_dir: Annotated[
+        Optional[str],
+        typer.Option(
+            help="Cache directory to scan (defaults to Hugging Face cache).",
+        ),
+    ] = None,
+    revisions: Annotated[
+        bool,
+        typer.Option(
+            help="Include revisions in the output instead of aggregated repositories.",
+        ),
+    ] = False,
+    filter: Annotated[
+        Optional[list[str]],
+        typer.Option(
+            "-f",
+            "--filter",
+            help="Filter entries (e.g. 'size>1GB', 'type=model', 'accessed>7d'). Can be used multiple times.",
+        ),
+    ] = None,
+    format: Annotated[
+        OutputFormat,
+        typer.Option(
+            help="Output format.",
+        ),
+    ] = OutputFormat.table,
+    quiet: Annotated[
+        bool,
+        typer.Option(
+            "-q",
+            "--quiet",
+            help="Print only IDs (repo IDs or revision hashes).",
+        ),
+    ] = False,
+    sort: Annotated[
+        Optional[SortOptions],
+        typer.Option(
+            help="Sort entries by key. Supported keys: 'accessed', 'modified', 'name', 'size'. "
+            "Append ':asc' or ':desc' to explicitly set the order (e.g., 'modified:asc'). "
+            "Defaults: 'accessed', 'modified', 'size' default to 'desc' (newest/biggest first); "
+            "'name' defaults to 'asc' (alphabetical).",
+        ),
+    ] = None,
+    limit: Annotated[
+        Optional[int],
+        typer.Option(
+            help="Limit the number of results returned. Returns only the top N entries after sorting.",
+        ),
+    ] = None,
+) -> None:
+    """List cached repositories or revisions."""
+    try:
+        hf_cache_info = scan_cache_dir(cache_dir)
+    except CacheNotFound as exc:
+        raise CLIError(f"Cache directory not found: {exc.cache_dir}") from exc
+
+    filters = filter or []
+
+    entries, repo_refs_map = collect_cache_entries(hf_cache_info, include_revisions=revisions)
+    try:
+        filter_fns = [compile_cache_filter(expr, repo_refs_map) for expr in filters]
+    except ValueError as exc:
+        raise typer.BadParameter(str(exc)) from exc
+
+    now = time.time()
+    for fn in filter_fns:
+        entries = [entry for entry in entries if fn(entry[0], entry[1], now)]
+
+    # Apply sorting if requested
+    if sort:
+        try:
+            sort_key_fn, reverse = compile_cache_sort(sort.value)
+            entries.sort(key=sort_key_fn, reverse=reverse)
+        except ValueError as exc:
+            raise typer.BadParameter(str(exc)) from exc
+
+    # Apply limit if requested
+    if limit is not None:
+        if limit < 0:
+            raise typer.BadParameter(f"Limit must be a positive integer, got {limit}.")
+        entries = entries[:limit]
+
+    if quiet:
+        for repo, revision in entries:
+            print(revision.commit_hash if revision is not None else repo.cache_id)
+        return
+
+    formatters = {
+        OutputFormat.table: print_cache_entries_table,
+        OutputFormat.json: print_cache_entries_json,
+    }
+    return formatters[format](entries, include_revisions=revisions, repo_refs_map=repo_refs_map)
+
+
+@cache_cli.command(
+    examples=[
+        "hf cache rm model/gpt2",
+        "hf cache rm <revision-hash>",
+        "hf cache rm model/gpt2 --dry-run",
+        "hf cache rm model/gpt2 --yes",
+    ],
+)
+def rm(
+    targets: Annotated[
+        list[str],
+        typer.Argument(
+            help="One or more repo IDs (e.g. model/bert-base-uncased) or revision hashes to delete.",
+        ),
+    ],
+    cache_dir: Annotated[
+        Optional[str],
+        typer.Option(
+            help="Cache directory to scan (defaults to Hugging Face cache).",
+        ),
+    ] = None,
+    yes: Annotated[
+        bool,
+        typer.Option(
+            "-y",
+            "--yes",
+            help="Skip confirmation prompt.",
+        ),
+    ] = False,
+    dry_run: Annotated[
+        bool,
+        typer.Option(
+            help="Preview deletions without removing anything.",
+        ),
+    ] = False,
+) -> None:
+    """Remove cached repositories or revisions."""
+    try:
+        hf_cache_info = scan_cache_dir(cache_dir)
+    except CacheNotFound as exc:
+        raise CLIError(f"Cache directory not found: {exc.cache_dir}") from exc
+
+    resolution = _resolve_deletion_targets(hf_cache_info, targets)
+
+    if resolution.missing:
+        print("Could not find the following targets in the cache:")
+        for entry in resolution.missing:
+            print(f"  - {entry}")
+
+    if len(resolution.revisions) == 0:
+        print("Nothing to delete.")
+        raise typer.Exit(code=0)
+
+    strategy = hf_cache_info.delete_revisions(*sorted(resolution.revisions))
+    counts = summarize_deletions(resolution.selected)
+
+    summary_parts: list[str] = []
+    if counts.repo_count:
+        summary_parts.append(f"{counts.repo_count} repo(s)")
+    if counts.partial_revision_count:
+        summary_parts.append(f"{counts.partial_revision_count} revision(s)")
+    if not summary_parts:
+        summary_parts.append(f"{counts.total_revision_count} revision(s)")
+
+    summary_text = " and ".join(summary_parts)
+    print(f"About to delete {summary_text} totalling {strategy.expected_freed_size_str}.")
+    print_cache_selected_revisions(resolution.selected)
+
+    if dry_run:
+        print("Dry run: no files were deleted.")
+        return
+
+    if not yes and not typer.confirm("Proceed with deletion?", default=False):
+        print("Deletion cancelled.")
+        return
+
+    strategy.execute()
+    counts = summarize_deletions(resolution.selected)
+ + +@cache_cli.command(examples=["hf cache prune", "hf cache prune --dry-run"]) +def prune( + cache_dir: Annotated[ + Optional[str], + typer.Option( + help="Cache directory to scan (defaults to Hugging Face cache).", + ), + ] = None, + yes: Annotated[ + bool, + typer.Option( + "-y", + "--yes", + help="Skip confirmation prompt.", + ), + ] = False, + dry_run: Annotated[ + bool, + typer.Option( + help="Preview deletions without removing anything.", + ), + ] = False, +) -> None: + """Remove detached revisions from the cache.""" + try: + hf_cache_info = scan_cache_dir(cache_dir) + except CacheNotFound as exc: + raise CLIError(f"Cache directory not found: {exc.cache_dir}") from exc + + selected: dict[CachedRepoInfo, frozenset[CachedRevisionInfo]] = {} + revisions: set[str] = set() + for repo in hf_cache_info.repos: + detached = frozenset(revision for revision in repo.revisions if len(revision.refs) == 0) + if not detached: + continue + selected[repo] = detached + revisions.update(revision.commit_hash for revision in detached) + + if len(revisions) == 0: + print("No unreferenced revisions found. Nothing to prune.") + return + + resolution = _DeletionResolution( + revisions=frozenset(revisions), + selected=selected, + missing=(), + ) + strategy = hf_cache_info.delete_revisions(*sorted(resolution.revisions)) + counts = summarize_deletions(selected) + + print( + f"About to delete {counts.total_revision_count} unreferenced revision(s) ({strategy.expected_freed_size_str} total)." + ) + print_cache_selected_revisions(selected) + + if dry_run: + print("Dry run: no files were deleted.") + return + + if not yes and not typer.confirm("Proceed?"): + print("Pruning cancelled.") + return + + strategy.execute() + print(f"Deleted {counts.total_revision_count} unreferenced revision(s); freed {strategy.expected_freed_size_str}.") + + +@cache_cli.command( + examples=[ + "hf cache verify gpt2", + "hf cache verify gpt2 --revision refs/pr/1", + "hf cache verify my-dataset --repo-type dataset", + ], +) +def verify( + repo_id: RepoIdArg, + repo_type: RepoTypeOpt = RepoTypeOpt.model, + revision: RevisionOpt = None, + cache_dir: Annotated[ + Optional[str], + typer.Option( + help="Cache directory to use when verifying files from cache (defaults to Hugging Face cache).", + ), + ] = None, + local_dir: Annotated[ + Optional[str], + typer.Option( + help="If set, verify files under this directory instead of the cache.", + ), + ] = None, + fail_on_missing_files: Annotated[ + bool, + typer.Option( + "--fail-on-missing-files", + help="Fail if some files exist on the remote but are missing locally.", + ), + ] = False, + fail_on_extra_files: Annotated[ + bool, + typer.Option( + "--fail-on-extra-files", + help="Fail if some files exist locally but are not present on the remote revision.", + ), + ] = False, + token: TokenOpt = None, +) -> None: + """Verify checksums for a single repo revision from cache or a local directory. + + Examples: + - Verify main revision in cache: `hf cache verify gpt2` + - Verify specific revision: `hf cache verify gpt2 --revision refs/pr/1` + - Verify dataset: `hf cache verify karpathy/fineweb-edu-100b-shuffle --repo-type dataset` + - Verify local dir: `hf cache verify deepseek-ai/DeepSeek-OCR --local-dir /path/to/repo` + """ + + if local_dir is not None and cache_dir is not None: + print("Cannot pass both --local-dir and --cache-dir. 
Use one or the other.") + raise typer.Exit(code=2) + + api = get_hf_api(token=token) + + result = api.verify_repo_checksums( + repo_id=repo_id, + repo_type=repo_type.value if hasattr(repo_type, "value") else str(repo_type), + revision=revision, + local_dir=local_dir, + cache_dir=cache_dir, + token=token, + ) + + exit_code = 0 + + has_mismatches = bool(result.mismatches) + if has_mismatches: + print("❌ Checksum verification failed for the following file(s):") + for m in result.mismatches: + print(f" - {m['path']}: expected {m['expected']} ({m['algorithm']}), got {m['actual']}") + exit_code = 1 + + if result.missing_paths: + if fail_on_missing_files: + print("Missing files (present remotely, absent locally):") + for p in result.missing_paths: + print(f" - {p}") + exit_code = 1 + else: + warning = ( + f"{len(result.missing_paths)} remote file(s) are missing locally. " + "Use --fail-on-missing-files for details." + ) + print(f"⚠️ {warning}") + + if result.extra_paths: + if fail_on_extra_files: + print("Extra files (present locally, absent remotely):") + for p in result.extra_paths: + print(f" - {p}") + exit_code = 1 + else: + warning = ( + f"{len(result.extra_paths)} local file(s) do not exist on the remote repo. " + "Use --fail-on-extra-files for details." + ) + print(f"⚠️ {warning}") + + verified_location = result.verified_path + + if exit_code != 0: + print(f"❌ Verification failed for '{repo_id}' ({repo_type.value}) in {verified_location}.") + print(f" Revision: {result.revision}") + raise typer.Exit(code=exit_code) + + print(f"✅ Verified {result.checked_count} file(s) for '{repo_id}' ({repo_type.value}) in {verified_location}") + print(" All checksums match.") diff --git a/venv/lib/python3.10/site-packages/huggingface_hub/cli/collections.py b/venv/lib/python3.10/site-packages/huggingface_hub/cli/collections.py new file mode 100644 index 0000000000000000000000000000000000000000..9f6f66fd968cd12a861810183a7c8affc23f5246 --- /dev/null +++ b/venv/lib/python3.10/site-packages/huggingface_hub/cli/collections.py @@ -0,0 +1,331 @@ +# Copyright 2026 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Contains commands to interact with collections on the Hugging Face Hub. 
+ +Usage: + # list collections on the Hub + hf collections ls + + # list collections for a specific user + hf collections ls --owner username + + # get info about a collection + hf collections info username/collection-slug + + # create a new collection + hf collections create "My Collection" --description "A collection of models" + + # add an item to a collection + hf collections add-item username/collection-slug username/model-name model + + # delete a collection + hf collections delete username/collection-slug +""" + +import enum +import json +from typing import Annotated, Optional, get_args + +import typer + +from huggingface_hub.hf_api import CollectionItemType_T, CollectionSort_T + +from ._cli_utils import ( + FormatOpt, + LimitOpt, + OutputFormat, + QuietOpt, + TokenOpt, + api_object_to_dict, + get_hf_api, + print_list_output, + typer_factory, +) + + +# Build enums dynamically from Literal types to avoid duplication +_COLLECTION_ITEM_TYPES = get_args(CollectionItemType_T) +CollectionItemType = enum.Enum("CollectionItemType", {t: t for t in _COLLECTION_ITEM_TYPES}, type=str) # type: ignore[misc] + +_COLLECTION_SORT_OPTIONS = get_args(CollectionSort_T) +CollectionSort = enum.Enum("CollectionSort", {s: s for s in _COLLECTION_SORT_OPTIONS}, type=str) # type: ignore[misc] + + +collections_cli = typer_factory(help="Interact with collections on the Hub.") + + +@collections_cli.command( + "ls", + examples=[ + "hf collections ls", + "hf collections ls --owner nvidia", + "hf collections ls --item models/teknium/OpenHermes-2.5-Mistral-7B --limit 10", + ], +) +def collections_ls( + owner: Annotated[ + Optional[str], + typer.Option(help="Filter by owner username or organization."), + ] = None, + item: Annotated[ + Optional[str], + typer.Option( + help='Filter collections containing a specific item (e.g., "models/gpt2", "datasets/squad", "papers/2311.12983").' + ), + ] = None, + sort: Annotated[ + Optional[CollectionSort], + typer.Option(help="Sort results by last modified, trending, or upvotes."), + ] = None, + limit: LimitOpt = 10, + format: FormatOpt = OutputFormat.table, + quiet: QuietOpt = False, + token: TokenOpt = None, +) -> None: + """List collections on the Hub.""" + api = get_hf_api(token=token) + sort_key = sort.value if sort else None + results = [ + api_object_to_dict(collection) + for collection in api.list_collections( + owner=owner, + item=item, + sort=sort_key, # type: ignore[arg-type] + limit=limit, + ) + ] + print_list_output(results, format=format, quiet=quiet) + + +@collections_cli.command( + "info", + examples=[ + "hf collections info username/my-collection-slug", + ], +) +def collections_info( + collection_slug: Annotated[str, typer.Argument(help="The collection slug (e.g., 'username/collection-slug').")], + token: TokenOpt = None, +) -> None: + """Get info about a collection on the Hub.""" + api = get_hf_api(token=token) + collection = api.get_collection(collection_slug) + print(json.dumps(api_object_to_dict(collection), indent=2)) + + +@collections_cli.command( + "create", + examples=[ + 'hf collections create "My Models"', + 'hf collections create "My Models" --description "A collection of my favorite models" --private', + 'hf collections create "Org Collection" --namespace my-org', + ], +) +def collections_create( + title: Annotated[str, typer.Argument(help="The title of the collection.")], + namespace: Annotated[ + Optional[str], + typer.Option(help="The namespace (username or organization). 
Defaults to the authenticated user."), + ] = None, + description: Annotated[ + Optional[str], + typer.Option(help="A description for the collection."), + ] = None, + private: Annotated[ + bool, + typer.Option(help="Create a private collection."), + ] = False, + exists_ok: Annotated[ + bool, + typer.Option(help="Do not raise an error if the collection already exists."), + ] = False, + token: TokenOpt = None, +) -> None: + """Create a new collection on the Hub.""" + api = get_hf_api(token=token) + collection = api.create_collection( + title=title, + namespace=namespace, + description=description, + private=private, + exists_ok=exists_ok, + ) + print(f"Collection created: {collection.url}") + print(json.dumps(api_object_to_dict(collection), indent=2)) + + +@collections_cli.command( + "update", + examples=[ + 'hf collections update username/my-collection --title "New Title"', + 'hf collections update username/my-collection --description "Updated description"', + "hf collections update username/my-collection --private --theme green", + ], +) +def collections_update( + collection_slug: Annotated[str, typer.Argument(help="The collection slug (e.g., 'username/collection-slug').")], + title: Annotated[ + Optional[str], + typer.Option(help="The new title for the collection."), + ] = None, + description: Annotated[ + Optional[str], + typer.Option(help="The new description for the collection."), + ] = None, + position: Annotated[ + Optional[int], + typer.Option(help="The new position of the collection in the owner's list."), + ] = None, + private: Annotated[ + Optional[bool], + typer.Option(help="Whether the collection should be private."), + ] = None, + theme: Annotated[ + Optional[str], + typer.Option(help="The theme color for the collection (e.g., 'green', 'blue')."), + ] = None, + token: TokenOpt = None, +) -> None: + """Update a collection's metadata on the Hub.""" + api = get_hf_api(token=token) + collection = api.update_collection_metadata( + collection_slug=collection_slug, + title=title, + description=description, + position=position, + private=private, + theme=theme, + ) + print(f"Collection updated: {collection.url}") + print(json.dumps(api_object_to_dict(collection), indent=2)) + + +@collections_cli.command( + "delete", + examples=[ + "hf collections delete username/my-collection", + "hf collections delete username/my-collection --missing-ok", + ], +) +def collections_delete( + collection_slug: Annotated[str, typer.Argument(help="The collection slug (e.g., 'username/collection-slug').")], + missing_ok: Annotated[ + bool, + typer.Option(help="Do not raise an error if the collection doesn't exist."), + ] = False, + token: TokenOpt = None, +) -> None: + """Delete a collection from the Hub.""" + api = get_hf_api(token=token) + api.delete_collection(collection_slug, missing_ok=missing_ok) + print(f"Collection deleted: {collection_slug}") + + +@collections_cli.command( + "add-item", + examples=[ + "hf collections add-item username/my-collection moonshotai/kimi-k2 model", + 'hf collections add-item username/my-collection Qwen/DeepPlanning dataset --note "Useful dataset"', + "hf collections add-item username/my-collection Tongyi-MAI/Z-Image space", + ], +) +def collections_add_item( + collection_slug: Annotated[str, typer.Argument(help="The collection slug (e.g., 'username/collection-slug').")], + item_id: Annotated[ + str, typer.Argument(help="The ID of the item to add (repo_id for repos, paper ID for papers).") + ], + item_type: Annotated[ + CollectionItemType, + typer.Argument(help="The type of 
item (model, dataset, space, paper, or collection)."), + ], + note: Annotated[ + Optional[str], + typer.Option(help="A note to attach to the item (max 500 characters)."), + ] = None, + exists_ok: Annotated[ + bool, + typer.Option(help="Do not raise an error if the item is already in the collection."), + ] = False, + token: TokenOpt = None, +) -> None: + """Add an item to a collection.""" + api = get_hf_api(token=token) + collection = api.add_collection_item( + collection_slug=collection_slug, + item_id=item_id, + item_type=item_type.value, # type: ignore[arg-type] + note=note, + exists_ok=exists_ok, + ) + print(f"Item added to collection: {collection_slug}") + print(json.dumps(api_object_to_dict(collection), indent=2)) + + +@collections_cli.command( + "update-item", + examples=[ + 'hf collections update-item username/my-collection ITEM_OBJECT_ID --note "Updated note"', + "hf collections update-item username/my-collection ITEM_OBJECT_ID --position 0", + ], +) +def collections_update_item( + collection_slug: Annotated[str, typer.Argument(help="The collection slug (e.g., 'username/collection-slug').")], + item_object_id: Annotated[ + str, + typer.Argument(help="The ID of the item in the collection (from 'item_object_id' field, not the repo_id)."), + ], + note: Annotated[ + Optional[str], + typer.Option(help="A new note for the item (max 500 characters)."), + ] = None, + position: Annotated[ + Optional[int], + typer.Option(help="The new position of the item in the collection."), + ] = None, + token: TokenOpt = None, +) -> None: + """Update an item in a collection.""" + api = get_hf_api(token=token) + api.update_collection_item( + collection_slug=collection_slug, + item_object_id=item_object_id, + note=note, + position=position, + ) + print(f"Item updated in collection: {collection_slug}") + + +@collections_cli.command("delete-item") +def collections_delete_item( + collection_slug: Annotated[str, typer.Argument(help="The collection slug (e.g., 'username/collection-slug').")], + item_object_id: Annotated[ + str, + typer.Argument( + help="The ID of the item in the collection (retrieved from the `item_object_id` field returned by 'hf collections info')." + ), + ], + missing_ok: Annotated[ + bool, + typer.Option(help="Do not raise an error if the item doesn't exist."), + ] = False, + token: TokenOpt = None, +) -> None: + """Delete an item from a collection.""" + api = get_hf_api(token=token) + api.delete_collection_item( + collection_slug=collection_slug, + item_object_id=item_object_id, + missing_ok=missing_ok, + ) + print(f"Item deleted from collection: {collection_slug}")
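These subcommands are thin wrappers around the `HfApi` collection helpers used above. A minimal sketch of the create-then-add flow in plain Python, with a placeholder title and item:

from huggingface_hub import HfApi

api = HfApi()  # picks up the locally saved token
collection = api.create_collection(title="My models", exists_ok=True)  # placeholder title
api.add_collection_item(
    collection_slug=collection.slug,
    item_id="gpt2",  # placeholder repo to add
    item_type="model",
    exists_ok=True,
)
print(f"Added item to {collection.slug}")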
diff --git a/venv/lib/python3.10/site-packages/huggingface_hub/cli/datasets.py b/venv/lib/python3.10/site-packages/huggingface_hub/cli/datasets.py new file mode 100644 index 0000000000000000000000000000000000000000..3f6d627b80c0c0948c2861d38efb6e4397d9d3d6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/huggingface_hub/cli/datasets.py @@ -0,0 +1,126 @@ +# Copyright 2026 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Contains commands to interact with datasets on the Hugging Face Hub. + +Usage: + # list datasets on the Hub + hf datasets ls + + # list datasets with a search query + hf datasets ls --search "code" + + # get info about a dataset + hf datasets info HuggingFaceFW/fineweb +""" + +import enum +import json +from typing import Annotated, Optional, get_args + +import typer + +from huggingface_hub.errors import CLIError, RepositoryNotFoundError, RevisionNotFoundError +from huggingface_hub.hf_api import DatasetSort_T, ExpandDatasetProperty_T + +from ._cli_utils import ( + AuthorOpt, + FilterOpt, + FormatOpt, + LimitOpt, + OutputFormat, + QuietOpt, + RevisionOpt, + SearchOpt, + TokenOpt, + api_object_to_dict, + get_hf_api, + make_expand_properties_parser, + print_list_output, + typer_factory, +) + + +_EXPAND_PROPERTIES = sorted(get_args(ExpandDatasetProperty_T)) +_SORT_OPTIONS = get_args(DatasetSort_T) +DatasetSortEnum = enum.Enum("DatasetSortEnum", {s: s for s in _SORT_OPTIONS}, type=str) # type: ignore[misc] + + +ExpandOpt = Annotated[ + Optional[str], + typer.Option( + help=f"Comma-separated properties to expand. Example: '--expand=downloads,likes,tags'. Valid: {', '.join(_EXPAND_PROPERTIES)}.", + callback=make_expand_properties_parser(_EXPAND_PROPERTIES), + ), +] + + +datasets_cli = typer_factory(help="Interact with datasets on the Hub.") + + +@datasets_cli.command( + "ls", + examples=[ + "hf datasets ls", + "hf datasets ls --sort downloads --limit 10", + 'hf datasets ls --search "code"', + ], +) +def datasets_ls( + search: SearchOpt = None, + author: AuthorOpt = None, + filter: FilterOpt = None, + sort: Annotated[ + Optional[DatasetSortEnum], + typer.Option(help="Sort results."), + ] = None, + limit: LimitOpt = 10, + expand: ExpandOpt = None, + format: FormatOpt = OutputFormat.table, + quiet: QuietOpt = False, + token: TokenOpt = None, +) -> None: + """List datasets on the Hub.""" + api = get_hf_api(token=token) + sort_key = sort.value if sort else None + results = [ + api_object_to_dict(dataset_info) + for dataset_info in api.list_datasets( + filter=filter, author=author, search=search, sort=sort_key, limit=limit, expand=expand + ) + ] + print_list_output(results, format=format, quiet=quiet) + + +@datasets_cli.command( + "info", + examples=[ + "hf datasets info HuggingFaceFW/fineweb", + "hf datasets info my-dataset --expand downloads,likes,tags", + ], +) +def datasets_info( + dataset_id: Annotated[str, typer.Argument(help="The dataset ID (e.g. 
`username/repo-name`.")], + revision: RevisionOpt = None, + expand: ExpandOpt = None, + token: TokenOpt = None, +) -> None: + """Get info about a dataset on the Hub.""" + api = get_hf_api(token=token) + try: + info = api.dataset_info(repo_id=dataset_id, revision=revision, expand=expand) # type: ignore[arg-type] + except RepositoryNotFoundError as e: + raise CLIError(f"Dataset '{dataset_id}' not found.") from e + except RevisionNotFoundError as e: + raise CLIError(f"Revision '{revision}' not found on '{dataset_id}'.") from e + print(json.dumps(api_object_to_dict(info), indent=2)) diff --git a/venv/lib/python3.10/site-packages/huggingface_hub/cli/download.py b/venv/lib/python3.10/site-packages/huggingface_hub/cli/download.py new file mode 100644 index 0000000000000000000000000000000000000000..2d89b944d30d2c13e06e9eb4aa6b0d35829a24fd --- /dev/null +++ b/venv/lib/python3.10/site-packages/huggingface_hub/cli/download.py @@ -0,0 +1,197 @@ +# coding=utf-8 +# Copyright 2023-present, the HuggingFace Inc. team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Contains command to download files from the Hub with the CLI. + +Usage: + hf download --help + + # Download file + hf download gpt2 config.json + + # Download entire repo + hf download fffiloni/zeroscope --repo-type=space --revision=refs/pr/78 + + # Download repo with filters + hf download gpt2 --include="*.safetensors" + + # Download with token + hf download Wauplin/private-model --token=hf_*** + + # Download quietly (no progress bar, no warnings, only the returned path) + hf download gpt2 config.json --quiet + + # Download to local dir + hf download gpt2 --local-dir=./models/gpt2 +""" + +import warnings +from typing import Annotated, Optional, Union + +import typer + +from huggingface_hub import logging +from huggingface_hub._snapshot_download import snapshot_download +from huggingface_hub.file_download import DryRunFileInfo, hf_hub_download +from huggingface_hub.utils import _format_size, disable_progress_bars, enable_progress_bars, tabulate + +from ._cli_utils import RepoIdArg, RepoTypeOpt, RevisionOpt, TokenOpt + + +DOWNLOAD_EXAMPLES = [ + "hf download meta-llama/Llama-3.2-1B-Instruct", + "hf download meta-llama/Llama-3.2-1B-Instruct config.json tokenizer.json", + 'hf download meta-llama/Llama-3.2-1B-Instruct --include "*.safetensors" --exclude "*.bin"', + "hf download meta-llama/Llama-3.2-1B-Instruct --local-dir ./models/llama", +] + + +logger = logging.get_logger(__name__)
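The `download` command defined below dispatches between the two helpers imported above: `hf_hub_download` when exactly one file is requested, `snapshot_download` otherwise. For reference, the equivalent calls in plain Python, with placeholder repo and file names:

from huggingface_hub import hf_hub_download, snapshot_download

# One explicit file: resolve (and cache) a single path.
config_path = hf_hub_download(repo_id="gpt2", filename="config.json")  # placeholders

# No explicit files: mirror a whole revision, optionally narrowed by glob patterns.
folder = snapshot_download(repo_id="gpt2", allow_patterns=["*.json"])
print(config_path, folder)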
+ + +def download( + repo_id: RepoIdArg, + filenames: Annotated[ + Optional[list[str]], + typer.Argument( + help="Files to download (e.g. `config.json`, `data/metadata.jsonl`).", + ), + ] = None, + repo_type: RepoTypeOpt = RepoTypeOpt.model, + revision: RevisionOpt = None, + include: Annotated[ + Optional[list[str]], + typer.Option( + help="Glob patterns to include from files to download (e.g. '*.json').", + ), + ] = None, + exclude: Annotated[ + Optional[list[str]], + typer.Option( + help="Glob patterns to exclude from files to download.", + ), + ] = None, + cache_dir: Annotated[ + Optional[str], + typer.Option( + help="Directory where downloaded files will be cached.", + ), + ] = None, + local_dir: Annotated[ + Optional[str], + typer.Option( + help="If set, the downloaded file will be placed under this directory. Check out https://huggingface.co/docs/huggingface_hub/guides/download#download-files-to-a-local-folder for more details.", + ), + ] = None, + force_download: Annotated[ + bool, + typer.Option( + help="If True, the files will be downloaded even if they are already cached.", + ), + ] = False, + dry_run: Annotated[ + bool, + typer.Option( + help="If True, perform a dry run without actually downloading any files.", + ), + ] = False, + token: TokenOpt = None, + quiet: Annotated[ + bool, + typer.Option( + help="If True, progress bars are disabled and only the path to the downloaded files is printed.", + ), + ] = False, + max_workers: Annotated[ + int, + typer.Option( + help="Maximum number of workers to use for downloading files. Default is 8.", + ), + ] = 8, +) -> None: + """Download files from the Hub.""" + + def run_download() -> Union[str, DryRunFileInfo, list[DryRunFileInfo]]: + filenames_list = filenames if filenames is not None else [] + # Warn user if patterns are ignored + if len(filenames_list) > 0: + if include is not None and len(include) > 0: + warnings.warn("Ignoring `--include` since filenames have been explicitly set.") + if exclude is not None and len(exclude) > 0: + warnings.warn("Ignoring `--exclude` since filenames have been explicitly set.") + + # Single file to download: use `hf_hub_download` + if len(filenames_list) == 1: + return hf_hub_download( + repo_id=repo_id, + repo_type=repo_type.value, + revision=revision, + filename=filenames_list[0], + cache_dir=cache_dir, + force_download=force_download, + token=token, + local_dir=local_dir, + library_name="huggingface-cli", + dry_run=dry_run, + ) + + # Otherwise: use `snapshot_download` to ensure all files come from the same revision + if len(filenames_list) == 0: + allow_patterns = include + ignore_patterns = exclude + else: + allow_patterns = filenames_list + ignore_patterns = None + + return snapshot_download( + repo_id=repo_id, + repo_type=repo_type.value, + revision=revision, + allow_patterns=allow_patterns, + ignore_patterns=ignore_patterns, + force_download=force_download, + cache_dir=cache_dir, + token=token, + local_dir=local_dir, + library_name="huggingface-cli", + max_workers=max_workers, + dry_run=dry_run, + ) + + def _print_result(result: Union[str, DryRunFileInfo, list[DryRunFileInfo]]) -> None: + if isinstance(result, str): + print(result) + return + + # Print dry run info + if isinstance(result, DryRunFileInfo): + result = [result] + print( + f"[dry-run] Will download {len([r for r in result if r.will_download])} files (out of {len(result)}) totalling {_format_size(sum(r.file_size for r in result if r.will_download))}." + ) + columns = ["File", "Bytes to download"] + items: list[list[Union[str, int]]] = [] + for info in sorted(result, key=lambda x: x.filename): + items.append([info.filename, _format_size(info.file_size) if info.will_download else "-"]) + print(tabulate(items, headers=columns)) + + if quiet: + disable_progress_bars() + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + _print_result(run_download()) + enable_progress_bars() + else: + _print_result(run_download()) + logging.set_verbosity_warning()
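The dry-run branch above relies on the `dry_run=True` flag that both helpers accept in this codebase (see the `DryRunFileInfo` import and the return annotations). A short sketch of consuming those records directly, with a placeholder repo:

from huggingface_hub import snapshot_download

# Ask what would be fetched without downloading anything.
infos = snapshot_download(repo_id="gpt2", dry_run=True)  # placeholder repo
for info in infos:
    # Each record reports the filename, its size, and whether it would be downloaded.
    print(info.filename, info.file_size, info.will_download)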
diff --git a/venv/lib/python3.10/site-packages/huggingface_hub/cli/hf.py b/venv/lib/python3.10/site-packages/huggingface_hub/cli/hf.py new file mode 100644 index 0000000000000000000000000000000000000000..f044e0a06337adc02c54d52c00fdae3ac211b421 --- /dev/null +++ b/venv/lib/python3.10/site-packages/huggingface_hub/cli/hf.py @@ -0,0 +1,98 @@ +# Copyright 2020 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys +import traceback + +from huggingface_hub import constants +from huggingface_hub.cli._cli_utils import check_cli_update, typer_factory +from huggingface_hub.cli._errors import format_known_exception +from huggingface_hub.cli.auth import auth_cli +from huggingface_hub.cli.cache import cache_cli +from huggingface_hub.cli.collections import collections_cli +from huggingface_hub.cli.datasets import datasets_cli +from huggingface_hub.cli.download import DOWNLOAD_EXAMPLES, download +from huggingface_hub.cli.inference_endpoints import ie_cli +from huggingface_hub.cli.jobs import jobs_cli +from huggingface_hub.cli.lfs import lfs_enable_largefiles, lfs_multipart_upload +from huggingface_hub.cli.models import models_cli +from huggingface_hub.cli.papers import papers_cli +from huggingface_hub.cli.repo import repo_cli +from huggingface_hub.cli.repo_files import repo_files_cli +from huggingface_hub.cli.skills import skills_cli +from huggingface_hub.cli.spaces import spaces_cli +from huggingface_hub.cli.system import env, version +from huggingface_hub.cli.upload import UPLOAD_EXAMPLES, upload +from huggingface_hub.cli.upload_large_folder import UPLOAD_LARGE_FOLDER_EXAMPLES, upload_large_folder +from huggingface_hub.errors import CLIError +from huggingface_hub.utils import ANSI, logging + + +app = typer_factory(help="Hugging Face Hub CLI") + + +# top level single commands (defined in their respective files) +app.command(examples=DOWNLOAD_EXAMPLES)(download) +app.command(examples=UPLOAD_EXAMPLES)(upload) +app.command(examples=UPLOAD_LARGE_FOLDER_EXAMPLES)(upload_large_folder) + +app.command(topic="help")(env) +app.command(topic="help")(version) + +app.command(hidden=True)(lfs_enable_largefiles) +app.command(hidden=True)(lfs_multipart_upload) + +# command groups +app.add_typer(auth_cli, name="auth") +app.add_typer(cache_cli, name="cache") +app.add_typer(collections_cli, name="collections") +app.add_typer(datasets_cli, name="datasets") +app.add_typer(jobs_cli, name="jobs") +app.add_typer(models_cli, name="models") +app.add_typer(papers_cli, 
name="papers") +app.add_typer(repo_cli, name="repo") +app.add_typer(repo_files_cli, name="repo-files") +app.add_typer(skills_cli, name="skills") +app.add_typer(spaces_cli, name="spaces") +app.add_typer(ie_cli, name="endpoints") + + +def main(): + if not constants.HF_DEBUG: + logging.set_verbosity_info() + check_cli_update("huggingface_hub") + + try: + app() + except CLIError as e: + print(f"Error: {e}", file=sys.stderr) + if constants.HF_DEBUG: + traceback.print_exc() + else: + print(ANSI.gray("Set HF_DEBUG=1 as environment variable for full traceback.")) + sys.exit(1) + except Exception as e: + message = format_known_exception(e) + if message: + print(f"Error: {message}", file=sys.stderr) + if constants.HF_DEBUG: + traceback.print_exc() + else: + print(ANSI.gray("Set HF_DEBUG=1 as environment variable for full traceback.")) + sys.exit(1) + raise + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.10/site-packages/huggingface_hub/cli/inference_endpoints.py b/venv/lib/python3.10/site-packages/huggingface_hub/cli/inference_endpoints.py new file mode 100644 index 0000000000000000000000000000000000000000..0e4d6ff9141284bc138d353fcc01ab3694f86321 --- /dev/null +++ b/venv/lib/python3.10/site-packages/huggingface_hub/cli/inference_endpoints.py @@ -0,0 +1,456 @@ +"""CLI commands for Hugging Face Inference Endpoints.""" + +import json +from typing import Annotated, Any, Optional + +import typer + +from huggingface_hub._inference_endpoints import InferenceEndpoint, InferenceEndpointScalingMetric +from huggingface_hub.errors import HfHubHTTPError + +from ._cli_utils import ( + FormatOpt, + OutputFormat, + QuietOpt, + TokenOpt, + get_hf_api, + print_list_output, + typer_factory, +) + + +ie_cli = typer_factory(help="Manage Hugging Face Inference Endpoints.") + +catalog_app = typer_factory(help="Interact with the Inference Endpoints catalog.") + + +NameArg = Annotated[ + str, + typer.Argument(help="Endpoint name."), +] +NameOpt = Annotated[ + Optional[str], + typer.Option(help="Endpoint name."), +] + +NamespaceOpt = Annotated[ + Optional[str], + typer.Option( + help="The namespace associated with the Inference Endpoint. 
Defaults to the current user's namespace.", + ), +] + + +def _print_endpoint(endpoint: InferenceEndpoint) -> None: + typer.echo(json.dumps(endpoint.raw, indent=2, sort_keys=True)) + + +@ie_cli.command(examples=["hf endpoints ls", "hf endpoints ls --namespace my-org"]) +def ls( + namespace: NamespaceOpt = None, + format: FormatOpt = OutputFormat.table, + quiet: QuietOpt = False, + token: TokenOpt = None, +) -> None: + """Lists all Inference Endpoints for the given namespace.""" + api = get_hf_api(token=token) + try: + endpoints = api.list_inference_endpoints(namespace=namespace, token=token) + except HfHubHTTPError as error: + typer.echo(f"Listing failed: {error}") + raise typer.Exit(code=error.response.status_code) from error + + results = [endpoint.raw for endpoint in endpoints] + + def row_fn(item: dict[str, Any]) -> list[str]: + status = item.get("status", {}) + model = item.get("model", {}) + compute = item.get("compute", {}) + provider = item.get("provider", {}) + return [ + str(item.get("name", "")), + str(model.get("repository", "") if isinstance(model, dict) else ""), + str(status.get("state", "") if isinstance(status, dict) else ""), + str(model.get("task", "") if isinstance(model, dict) else ""), + str(model.get("framework", "") if isinstance(model, dict) else ""), + str(compute.get("instanceType", "") if isinstance(compute, dict) else ""), + str(provider.get("vendor", "") if isinstance(provider, dict) else ""), + str(provider.get("region", "") if isinstance(provider, dict) else ""), + ] + + print_list_output( + items=results, + format=format, + quiet=quiet, + id_key="name", + headers=["NAME", "MODEL", "STATUS", "TASK", "FRAMEWORK", "INSTANCE", "VENDOR", "REGION"], + row_fn=row_fn, + ) + + +@ie_cli.command(name="deploy", examples=["hf endpoints deploy my-endpoint --repo gpt2 --framework pytorch ..."]) +def deploy( + name: NameArg, + repo: Annotated[ + str, + typer.Option( + help="The name of the model repository associated with the Inference Endpoint (e.g. 'openai/gpt-oss-120b').", + ), + ], + framework: Annotated[ + str, + typer.Option( + help="The machine learning framework used for the model (e.g. 'vllm').", + ), + ], + accelerator: Annotated[ + str, + typer.Option( + help="The hardware accelerator to be used for inference (e.g. 'cpu').", + ), + ], + instance_size: Annotated[ + str, + typer.Option( + help="The size or type of the instance to be used for hosting the model (e.g. 'x4').", + ), + ], + instance_type: Annotated[ + str, + typer.Option( + help="The cloud instance type where the Inference Endpoint will be deployed (e.g. 'intel-icl').", + ), + ], + region: Annotated[ + str, + typer.Option( + help="The cloud region in which the Inference Endpoint will be created (e.g. 'us-east-1').", + ), + ], + vendor: Annotated[ + str, + typer.Option( + help="The cloud provider or vendor where the Inference Endpoint will be hosted (e.g. 'aws').", + ), + ], + *, + namespace: NamespaceOpt = None, + task: Annotated[ + Optional[str], + typer.Option( + help="The task on which to deploy the model (e.g. 
'text-classification').", + ), + ] = None, + token: TokenOpt = None, + min_replica: Annotated[ + int, + typer.Option( + help="The minimum number of replicas (instances) to keep running for the Inference Endpoint.", + ), + ] = 1, + max_replica: Annotated[ + int, + typer.Option( + help="The maximum number of replicas (instances) to scale to for the Inference Endpoint.", + ), + ] = 1, + scale_to_zero_timeout: Annotated[ + Optional[int], + typer.Option( + help="The duration in minutes before an inactive endpoint is scaled to zero.", + ), + ] = None, + scaling_metric: Annotated[ + Optional[InferenceEndpointScalingMetric], + typer.Option( + help="The metric reference for scaling.", + ), + ] = None, + scaling_threshold: Annotated[ + Optional[float], + typer.Option( + help="The scaling metric threshold used to trigger a scale up. Ignored when scaling metric is not provided.", + ), + ] = None, +) -> None: + """Deploy an Inference Endpoint from a Hub repository.""" + api = get_hf_api(token=token) + endpoint = api.create_inference_endpoint( + name=name, + repository=repo, + framework=framework, + accelerator=accelerator, + instance_size=instance_size, + instance_type=instance_type, + region=region, + vendor=vendor, + namespace=namespace, + task=task, + token=token, + min_replica=min_replica, + max_replica=max_replica, + scaling_metric=scaling_metric, + scaling_threshold=scaling_threshold, + scale_to_zero_timeout=scale_to_zero_timeout, + ) + + _print_endpoint(endpoint) + + +@catalog_app.command(name="deploy", examples=["hf endpoints catalog deploy --repo meta-llama/Llama-3.2-1B-Instruct"]) +def deploy_from_catalog( + repo: Annotated[ + str, + typer.Option( + help="The name of the model repository associated with the Inference Endpoint (e.g. 'openai/gpt-oss-120b').", + ), + ], + name: NameOpt = None, + namespace: NamespaceOpt = None, + token: TokenOpt = None, +) -> None: + """Deploy an Inference Endpoint from the Model Catalog.""" + api = get_hf_api(token=token) + try: + endpoint = api.create_inference_endpoint_from_catalog( + repo_id=repo, + name=name, + namespace=namespace, + token=token, + ) + except HfHubHTTPError as error: + typer.echo(f"Deployment failed: {error}") + raise typer.Exit(code=error.response.status_code) from error + + _print_endpoint(endpoint) + + +def list_catalog( + token: TokenOpt = None, +) -> None: + """List available Catalog models.""" + api = get_hf_api(token=token) + try: + models = api.list_inference_catalog(token=token) + except HfHubHTTPError as error: + typer.echo(f"Catalog fetch failed: {error}") + raise typer.Exit(code=error.response.status_code) from error + + typer.echo(json.dumps({"models": models}, indent=2, sort_keys=True)) + + +catalog_app.command(name="ls", examples=["hf endpoints catalog ls"])(list_catalog) +ie_cli.command(name="list-catalog", hidden=True)(list_catalog) + + +ie_cli.add_typer(catalog_app, name="catalog") + + +@ie_cli.command(examples=["hf endpoints describe my-endpoint"]) +def describe( + name: NameArg, + namespace: NamespaceOpt = None, + token: TokenOpt = None, +) -> None: + """Get information about an existing endpoint.""" + api = get_hf_api(token=token) + try: + endpoint = api.get_inference_endpoint(name=name, namespace=namespace, token=token) + except HfHubHTTPError as error: + typer.echo(f"Fetch failed: {error}") + raise typer.Exit(code=error.response.status_code) from error + + _print_endpoint(endpoint) + + +@ie_cli.command(examples=["hf endpoints update my-endpoint --min-replica 2"]) +def update( + name: NameArg, + namespace: NamespaceOpt 
= None, + repo: Annotated[ + Optional[str], + typer.Option( + help="The name of the model repository associated with the Inference Endpoint (e.g. 'openai/gpt-oss-120b').", + ), + ] = None, + accelerator: Annotated[ + Optional[str], + typer.Option( + help="The hardware accelerator to be used for inference (e.g. 'cpu').", + ), + ] = None, + instance_size: Annotated[ + Optional[str], + typer.Option( + help="The size or type of the instance to be used for hosting the model (e.g. 'x4').", + ), + ] = None, + instance_type: Annotated[ + Optional[str], + typer.Option( + help="The cloud instance type where the Inference Endpoint will be deployed (e.g. 'intel-icl').", + ), + ] = None, + framework: Annotated[ + Optional[str], + typer.Option( + help="The machine learning framework used for the model (e.g. 'custom').", + ), + ] = None, + revision: Annotated[ + Optional[str], + typer.Option( + help="The specific model revision to deploy on the Inference Endpoint (e.g. '6c0e6080953db56375760c0471a8c5f2929baf11').", + ), + ] = None, + task: Annotated[ + Optional[str], + typer.Option( + help="The task on which to deploy the model (e.g. 'text-classification').", + ), + ] = None, + min_replica: Annotated[ + Optional[int], + typer.Option( + help="The minimum number of replicas (instances) to keep running for the Inference Endpoint.", + ), + ] = None, + max_replica: Annotated[ + Optional[int], + typer.Option( + help="The maximum number of replicas (instances) to scale to for the Inference Endpoint.", + ), + ] = None, + scale_to_zero_timeout: Annotated[ + Optional[int], + typer.Option( + help="The duration in minutes before an inactive endpoint is scaled to zero.", + ), + ] = None, + scaling_metric: Annotated[ + Optional[InferenceEndpointScalingMetric], + typer.Option( + help="The metric reference for scaling.", + ), + ] = None, + scaling_threshold: Annotated[ + Optional[float], + typer.Option( + help="The scaling metric threshold used to trigger a scale up. Ignored when scaling metric is not provided.", + ), + ] = None, + token: TokenOpt = None, +) -> None: + """Update an existing endpoint.""" + api = get_hf_api(token=token) + try: + endpoint = api.update_inference_endpoint( + name=name, + namespace=namespace, + repository=repo, + framework=framework, + revision=revision, + task=task, + accelerator=accelerator, + instance_size=instance_size, + instance_type=instance_type, + min_replica=min_replica, + max_replica=max_replica, + scale_to_zero_timeout=scale_to_zero_timeout, + scaling_metric=scaling_metric, + scaling_threshold=scaling_threshold, + token=token, + ) + except HfHubHTTPError as error: + typer.echo(f"Update failed: {error}") + raise typer.Exit(code=error.response.status_code) from error + _print_endpoint(endpoint) + + +@ie_cli.command(examples=["hf endpoints delete my-endpoint"]) +def delete( + name: NameArg, + namespace: NamespaceOpt = None, + yes: Annotated[ + bool, + typer.Option("--yes", help="Skip confirmation prompts."), + ] = False, + token: TokenOpt = None, +) -> None: + """Delete an Inference Endpoint permanently.""" + if not yes: + confirmation = typer.prompt(f"Delete endpoint '{name}'? 
Type the name to confirm.") + if confirmation != name: + typer.echo("Aborted.") + raise typer.Exit(code=2) + + api = get_hf_api(token=token) + try: + api.delete_inference_endpoint(name=name, namespace=namespace, token=token) + except HfHubHTTPError as error: + typer.echo(f"Delete failed: {error}") + raise typer.Exit(code=error.response.status_code) from error + + typer.echo(f"Deleted '{name}'.") + + +@ie_cli.command(examples=["hf endpoints pause my-endpoint"]) +def pause( + name: NameArg, + namespace: NamespaceOpt = None, + token: TokenOpt = None, +) -> None: + """Pause an Inference Endpoint.""" + api = get_hf_api(token=token) + try: + endpoint = api.pause_inference_endpoint(name=name, namespace=namespace, token=token) + except HfHubHTTPError as error: + typer.echo(f"Pause failed: {error}") + raise typer.Exit(code=error.response.status_code) from error + + _print_endpoint(endpoint) + + +@ie_cli.command(examples=["hf endpoints resume my-endpoint"]) +def resume( + name: NameArg, + namespace: NamespaceOpt = None, + fail_if_already_running: Annotated[ + bool, + typer.Option( + "--fail-if-already-running", + help="If set, raise an error if the Inference Endpoint is already running.", + ), + ] = False, + token: TokenOpt = None, +) -> None: + """Resume an Inference Endpoint.""" + api = get_hf_api(token=token) + try: + endpoint = api.resume_inference_endpoint( + name=name, + namespace=namespace, + token=token, + running_ok=not fail_if_already_running, + ) + except HfHubHTTPError as error: + typer.echo(f"Resume failed: {error}") + raise typer.Exit(code=error.response.status_code) from error + _print_endpoint(endpoint) + + +@ie_cli.command(examples=["hf endpoints scale-to-zero my-endpoint"]) +def scale_to_zero( + name: NameArg, + namespace: NamespaceOpt = None, + token: TokenOpt = None, +) -> None: + """Scale an Inference Endpoint to zero.""" + api = get_hf_api(token=token) + try: + endpoint = api.scale_to_zero_inference_endpoint(name=name, namespace=namespace, token=token) + except HfHubHTTPError as error: + typer.echo(f"Scale-to-zero failed: {error}") + raise typer.Exit(code=error.response.status_code) from error + + _print_endpoint(endpoint)
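These lifecycle subcommands map directly onto `HfApi` methods, and the returned `InferenceEndpoint` object exposes the same operations as instance helpers. A compact sketch of the pause/resume flow, assuming a placeholder endpoint name and that the object-level helpers mirror the API methods used above:

from huggingface_hub import HfApi

api = HfApi()
endpoint = api.get_inference_endpoint("my-endpoint")  # placeholder name
endpoint.pause()  # same effect as `hf endpoints pause my-endpoint`
endpoint.resume(running_ok=True)  # same effect as `hf endpoints resume`
endpoint.wait()  # block until the endpoint reports it is running
print(endpoint.status, endpoint.url)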
diff --git a/venv/lib/python3.10/site-packages/huggingface_hub/cli/jobs.py b/venv/lib/python3.10/site-packages/huggingface_hub/cli/jobs.py new file mode 100644 index 0000000000000000000000000000000000000000..a36c34c152bdb7720e9f02b47b8d1c8a49bcf817 --- /dev/null +++ b/venv/lib/python3.10/site-packages/huggingface_hub/cli/jobs.py @@ -0,0 +1,1078 @@ +# Copyright 2025 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Contains commands to interact with jobs on the Hugging Face Hub. + +Usage: + # Run a job + hf jobs run + + # List running or completed jobs + hf jobs ps [-a] [-f key=value] [--format TEMPLATE] + + # Stream logs from a job + hf jobs logs + + # Stream resource usage stats and metrics from a job + hf jobs stats + + # Inspect detailed information about a job + hf jobs inspect + + # Cancel a running job + hf jobs cancel + + # List available hardware options + hf jobs hardware + + # Run a UV script + hf jobs uv run