Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .venv/lib/python3.13/site-packages/certifi-2025.7.14.dist-info/licenses/LICENSE +20 -0
- .venv/lib/python3.13/site-packages/certifi/__pycache__/__init__.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/certifi/__pycache__/core.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/__init__.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/api.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/cd.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/constant.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/legacy.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/models.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/utils.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/version.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/charset_normalizer/cli/__init__.py +8 -0
- .venv/lib/python3.13/site-packages/charset_normalizer/cli/__main__.py +381 -0
- .venv/lib/python3.13/site-packages/filelock-3.18.0.dist-info/licenses/LICENSE +24 -0
- .venv/lib/python3.13/site-packages/filelock/__pycache__/_soft.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/filelock/__pycache__/_windows.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/filelock/__pycache__/asyncio.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__init__.py +27 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/__init__.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/_cli_utils.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/delete_cache.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/download.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/env.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/huggingface_cli.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/lfs.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/repo.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/repo_files.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/scan_cache.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/tag.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/upload.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/upload_large_folder.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/user.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/version.cpython-313.pyc +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/_cli_utils.py +69 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/delete_cache.py +474 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/download.py +200 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/env.py +36 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/huggingface_cli.py +63 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/lfs.py +200 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/repo.py +147 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/repo_files.py +128 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/scan_cache.py +181 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/tag.py +159 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/upload.py +314 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/upload_large_folder.py +129 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/user.py +198 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/commands/version.py +37 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/inference/_client.py +0 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/inference/_common.py +427 -0
- .venv/lib/python3.13/site-packages/huggingface_hub/inference/_generated/__init__.py +0 -0
.venv/lib/python3.13/site-packages/certifi-2025.7.14.dist-info/licenses/LICENSE
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
This package contains a modified version of ca-bundle.crt:
|
| 2 |
+
|
| 3 |
+
ca-bundle.crt -- Bundle of CA Root Certificates
|
| 4 |
+
|
| 5 |
+
This is a bundle of X.509 certificates of public Certificate Authorities
|
| 6 |
+
(CA). These were automatically extracted from Mozilla's root certificates
|
| 7 |
+
file (certdata.txt). This file can be found in the mozilla source tree:
|
| 8 |
+
https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt
|
| 9 |
+
It contains the certificates in PEM format and therefore
|
| 10 |
+
can be directly used with curl / libcurl / php_curl, or with
|
| 11 |
+
an Apache+mod_ssl webserver for SSL client authentication.
|
| 12 |
+
Just configure this file as the SSLCACertificateFile.#
|
| 13 |
+
|
| 14 |
+
***** BEGIN LICENSE BLOCK *****
|
| 15 |
+
This Source Code Form is subject to the terms of the Mozilla Public License,
|
| 16 |
+
v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
|
| 17 |
+
one at http://mozilla.org/MPL/2.0/.
|
| 18 |
+
|
| 19 |
+
***** END LICENSE BLOCK *****
|
| 20 |
+
@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
|
.venv/lib/python3.13/site-packages/certifi/__pycache__/__init__.cpython-313.pyc
ADDED
|
Binary file (326 Bytes). View file
|
|
|
.venv/lib/python3.13/site-packages/certifi/__pycache__/core.cpython-313.pyc
ADDED
|
Binary file (2.08 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/__init__.cpython-313.pyc
ADDED
|
Binary file (1.79 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/api.cpython-313.pyc
ADDED
|
Binary file (18.7 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/cd.cpython-313.pyc
ADDED
|
Binary file (13.4 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/constant.cpython-313.pyc
ADDED
|
Binary file (40.8 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/legacy.cpython-313.pyc
ADDED
|
Binary file (2.89 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/models.cpython-313.pyc
ADDED
|
Binary file (17.3 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/utils.cpython-313.pyc
ADDED
|
Binary file (14.1 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/charset_normalizer/__pycache__/version.cpython-313.pyc
ADDED
|
Binary file (394 Bytes). View file
|
|
|
.venv/lib/python3.13/site-packages/charset_normalizer/cli/__init__.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from .__main__ import cli_detect, query_yes_no
|
| 4 |
+
|
| 5 |
+
__all__ = (
|
| 6 |
+
"cli_detect",
|
| 7 |
+
"query_yes_no",
|
| 8 |
+
)
|
.venv/lib/python3.13/site-packages/charset_normalizer/cli/__main__.py
ADDED
|
@@ -0,0 +1,381 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import argparse
|
| 4 |
+
import sys
|
| 5 |
+
import typing
|
| 6 |
+
from json import dumps
|
| 7 |
+
from os.path import abspath, basename, dirname, join, realpath
|
| 8 |
+
from platform import python_version
|
| 9 |
+
from unicodedata import unidata_version
|
| 10 |
+
|
| 11 |
+
import charset_normalizer.md as md_module
|
| 12 |
+
from charset_normalizer import from_fp
|
| 13 |
+
from charset_normalizer.models import CliDetectionResult
|
| 14 |
+
from charset_normalizer.version import __version__
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def query_yes_no(question: str, default: str = "yes") -> bool:
|
| 18 |
+
"""Ask a yes/no question via input() and return their answer.
|
| 19 |
+
|
| 20 |
+
"question" is a string that is presented to the user.
|
| 21 |
+
"default" is the presumed answer if the user just hits <Enter>.
|
| 22 |
+
It must be "yes" (the default), "no" or None (meaning
|
| 23 |
+
an answer is required of the user).
|
| 24 |
+
|
| 25 |
+
The "answer" return value is True for "yes" or False for "no".
|
| 26 |
+
|
| 27 |
+
Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input
|
| 28 |
+
"""
|
| 29 |
+
valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}
|
| 30 |
+
if default is None:
|
| 31 |
+
prompt = " [y/n] "
|
| 32 |
+
elif default == "yes":
|
| 33 |
+
prompt = " [Y/n] "
|
| 34 |
+
elif default == "no":
|
| 35 |
+
prompt = " [y/N] "
|
| 36 |
+
else:
|
| 37 |
+
raise ValueError("invalid default answer: '%s'" % default)
|
| 38 |
+
|
| 39 |
+
while True:
|
| 40 |
+
sys.stdout.write(question + prompt)
|
| 41 |
+
choice = input().lower()
|
| 42 |
+
if default is not None and choice == "":
|
| 43 |
+
return valid[default]
|
| 44 |
+
elif choice in valid:
|
| 45 |
+
return valid[choice]
|
| 46 |
+
else:
|
| 47 |
+
sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n")
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class FileType:
|
| 51 |
+
"""Factory for creating file object types
|
| 52 |
+
|
| 53 |
+
Instances of FileType are typically passed as type= arguments to the
|
| 54 |
+
ArgumentParser add_argument() method.
|
| 55 |
+
|
| 56 |
+
Keyword Arguments:
|
| 57 |
+
- mode -- A string indicating how the file is to be opened. Accepts the
|
| 58 |
+
same values as the builtin open() function.
|
| 59 |
+
- bufsize -- The file's desired buffer size. Accepts the same values as
|
| 60 |
+
the builtin open() function.
|
| 61 |
+
- encoding -- The file's encoding. Accepts the same values as the
|
| 62 |
+
builtin open() function.
|
| 63 |
+
- errors -- A string indicating how encoding and decoding errors are to
|
| 64 |
+
be handled. Accepts the same value as the builtin open() function.
|
| 65 |
+
|
| 66 |
+
Backported from CPython 3.12
|
| 67 |
+
"""
|
| 68 |
+
|
| 69 |
+
def __init__(
|
| 70 |
+
self,
|
| 71 |
+
mode: str = "r",
|
| 72 |
+
bufsize: int = -1,
|
| 73 |
+
encoding: str | None = None,
|
| 74 |
+
errors: str | None = None,
|
| 75 |
+
):
|
| 76 |
+
self._mode = mode
|
| 77 |
+
self._bufsize = bufsize
|
| 78 |
+
self._encoding = encoding
|
| 79 |
+
self._errors = errors
|
| 80 |
+
|
| 81 |
+
def __call__(self, string: str) -> typing.IO: # type: ignore[type-arg]
|
| 82 |
+
# the special argument "-" means sys.std{in,out}
|
| 83 |
+
if string == "-":
|
| 84 |
+
if "r" in self._mode:
|
| 85 |
+
return sys.stdin.buffer if "b" in self._mode else sys.stdin
|
| 86 |
+
elif any(c in self._mode for c in "wax"):
|
| 87 |
+
return sys.stdout.buffer if "b" in self._mode else sys.stdout
|
| 88 |
+
else:
|
| 89 |
+
msg = f'argument "-" with mode {self._mode}'
|
| 90 |
+
raise ValueError(msg)
|
| 91 |
+
|
| 92 |
+
# all other arguments are used as file names
|
| 93 |
+
try:
|
| 94 |
+
return open(string, self._mode, self._bufsize, self._encoding, self._errors)
|
| 95 |
+
except OSError as e:
|
| 96 |
+
message = f"can't open '{string}': {e}"
|
| 97 |
+
raise argparse.ArgumentTypeError(message)
|
| 98 |
+
|
| 99 |
+
def __repr__(self) -> str:
|
| 100 |
+
args = self._mode, self._bufsize
|
| 101 |
+
kwargs = [("encoding", self._encoding), ("errors", self._errors)]
|
| 102 |
+
args_str = ", ".join(
|
| 103 |
+
[repr(arg) for arg in args if arg != -1]
|
| 104 |
+
+ [f"{kw}={arg!r}" for kw, arg in kwargs if arg is not None]
|
| 105 |
+
)
|
| 106 |
+
return f"{type(self).__name__}({args_str})"
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def cli_detect(argv: list[str] | None = None) -> int:
|
| 110 |
+
"""
|
| 111 |
+
CLI assistant using ARGV and ArgumentParser
|
| 112 |
+
:param argv:
|
| 113 |
+
:return: 0 if everything is fine, anything else equal trouble
|
| 114 |
+
"""
|
| 115 |
+
parser = argparse.ArgumentParser(
|
| 116 |
+
description="The Real First Universal Charset Detector. "
|
| 117 |
+
"Discover originating encoding used on text file. "
|
| 118 |
+
"Normalize text to unicode."
|
| 119 |
+
)
|
| 120 |
+
|
| 121 |
+
parser.add_argument(
|
| 122 |
+
"files", type=FileType("rb"), nargs="+", help="File(s) to be analysed"
|
| 123 |
+
)
|
| 124 |
+
parser.add_argument(
|
| 125 |
+
"-v",
|
| 126 |
+
"--verbose",
|
| 127 |
+
action="store_true",
|
| 128 |
+
default=False,
|
| 129 |
+
dest="verbose",
|
| 130 |
+
help="Display complementary information about file if any. "
|
| 131 |
+
"Stdout will contain logs about the detection process.",
|
| 132 |
+
)
|
| 133 |
+
parser.add_argument(
|
| 134 |
+
"-a",
|
| 135 |
+
"--with-alternative",
|
| 136 |
+
action="store_true",
|
| 137 |
+
default=False,
|
| 138 |
+
dest="alternatives",
|
| 139 |
+
help="Output complementary possibilities if any. Top-level JSON WILL be a list.",
|
| 140 |
+
)
|
| 141 |
+
parser.add_argument(
|
| 142 |
+
"-n",
|
| 143 |
+
"--normalize",
|
| 144 |
+
action="store_true",
|
| 145 |
+
default=False,
|
| 146 |
+
dest="normalize",
|
| 147 |
+
help="Permit to normalize input file. If not set, program does not write anything.",
|
| 148 |
+
)
|
| 149 |
+
parser.add_argument(
|
| 150 |
+
"-m",
|
| 151 |
+
"--minimal",
|
| 152 |
+
action="store_true",
|
| 153 |
+
default=False,
|
| 154 |
+
dest="minimal",
|
| 155 |
+
help="Only output the charset detected to STDOUT. Disabling JSON output.",
|
| 156 |
+
)
|
| 157 |
+
parser.add_argument(
|
| 158 |
+
"-r",
|
| 159 |
+
"--replace",
|
| 160 |
+
action="store_true",
|
| 161 |
+
default=False,
|
| 162 |
+
dest="replace",
|
| 163 |
+
help="Replace file when trying to normalize it instead of creating a new one.",
|
| 164 |
+
)
|
| 165 |
+
parser.add_argument(
|
| 166 |
+
"-f",
|
| 167 |
+
"--force",
|
| 168 |
+
action="store_true",
|
| 169 |
+
default=False,
|
| 170 |
+
dest="force",
|
| 171 |
+
help="Replace file without asking if you are sure, use this flag with caution.",
|
| 172 |
+
)
|
| 173 |
+
parser.add_argument(
|
| 174 |
+
"-i",
|
| 175 |
+
"--no-preemptive",
|
| 176 |
+
action="store_true",
|
| 177 |
+
default=False,
|
| 178 |
+
dest="no_preemptive",
|
| 179 |
+
help="Disable looking at a charset declaration to hint the detector.",
|
| 180 |
+
)
|
| 181 |
+
parser.add_argument(
|
| 182 |
+
"-t",
|
| 183 |
+
"--threshold",
|
| 184 |
+
action="store",
|
| 185 |
+
default=0.2,
|
| 186 |
+
type=float,
|
| 187 |
+
dest="threshold",
|
| 188 |
+
help="Define a custom maximum amount of noise allowed in decoded content. 0. <= noise <= 1.",
|
| 189 |
+
)
|
| 190 |
+
parser.add_argument(
|
| 191 |
+
"--version",
|
| 192 |
+
action="version",
|
| 193 |
+
version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format(
|
| 194 |
+
__version__,
|
| 195 |
+
python_version(),
|
| 196 |
+
unidata_version,
|
| 197 |
+
"OFF" if md_module.__file__.lower().endswith(".py") else "ON",
|
| 198 |
+
),
|
| 199 |
+
help="Show version information and exit.",
|
| 200 |
+
)
|
| 201 |
+
|
| 202 |
+
args = parser.parse_args(argv)
|
| 203 |
+
|
| 204 |
+
if args.replace is True and args.normalize is False:
|
| 205 |
+
if args.files:
|
| 206 |
+
for my_file in args.files:
|
| 207 |
+
my_file.close()
|
| 208 |
+
print("Use --replace in addition of --normalize only.", file=sys.stderr)
|
| 209 |
+
return 1
|
| 210 |
+
|
| 211 |
+
if args.force is True and args.replace is False:
|
| 212 |
+
if args.files:
|
| 213 |
+
for my_file in args.files:
|
| 214 |
+
my_file.close()
|
| 215 |
+
print("Use --force in addition of --replace only.", file=sys.stderr)
|
| 216 |
+
return 1
|
| 217 |
+
|
| 218 |
+
if args.threshold < 0.0 or args.threshold > 1.0:
|
| 219 |
+
if args.files:
|
| 220 |
+
for my_file in args.files:
|
| 221 |
+
my_file.close()
|
| 222 |
+
print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr)
|
| 223 |
+
return 1
|
| 224 |
+
|
| 225 |
+
x_ = []
|
| 226 |
+
|
| 227 |
+
for my_file in args.files:
|
| 228 |
+
matches = from_fp(
|
| 229 |
+
my_file,
|
| 230 |
+
threshold=args.threshold,
|
| 231 |
+
explain=args.verbose,
|
| 232 |
+
preemptive_behaviour=args.no_preemptive is False,
|
| 233 |
+
)
|
| 234 |
+
|
| 235 |
+
best_guess = matches.best()
|
| 236 |
+
|
| 237 |
+
if best_guess is None:
|
| 238 |
+
print(
|
| 239 |
+
'Unable to identify originating encoding for "{}". {}'.format(
|
| 240 |
+
my_file.name,
|
| 241 |
+
(
|
| 242 |
+
"Maybe try increasing maximum amount of chaos."
|
| 243 |
+
if args.threshold < 1.0
|
| 244 |
+
else ""
|
| 245 |
+
),
|
| 246 |
+
),
|
| 247 |
+
file=sys.stderr,
|
| 248 |
+
)
|
| 249 |
+
x_.append(
|
| 250 |
+
CliDetectionResult(
|
| 251 |
+
abspath(my_file.name),
|
| 252 |
+
None,
|
| 253 |
+
[],
|
| 254 |
+
[],
|
| 255 |
+
"Unknown",
|
| 256 |
+
[],
|
| 257 |
+
False,
|
| 258 |
+
1.0,
|
| 259 |
+
0.0,
|
| 260 |
+
None,
|
| 261 |
+
True,
|
| 262 |
+
)
|
| 263 |
+
)
|
| 264 |
+
else:
|
| 265 |
+
x_.append(
|
| 266 |
+
CliDetectionResult(
|
| 267 |
+
abspath(my_file.name),
|
| 268 |
+
best_guess.encoding,
|
| 269 |
+
best_guess.encoding_aliases,
|
| 270 |
+
[
|
| 271 |
+
cp
|
| 272 |
+
for cp in best_guess.could_be_from_charset
|
| 273 |
+
if cp != best_guess.encoding
|
| 274 |
+
],
|
| 275 |
+
best_guess.language,
|
| 276 |
+
best_guess.alphabets,
|
| 277 |
+
best_guess.bom,
|
| 278 |
+
best_guess.percent_chaos,
|
| 279 |
+
best_guess.percent_coherence,
|
| 280 |
+
None,
|
| 281 |
+
True,
|
| 282 |
+
)
|
| 283 |
+
)
|
| 284 |
+
|
| 285 |
+
if len(matches) > 1 and args.alternatives:
|
| 286 |
+
for el in matches:
|
| 287 |
+
if el != best_guess:
|
| 288 |
+
x_.append(
|
| 289 |
+
CliDetectionResult(
|
| 290 |
+
abspath(my_file.name),
|
| 291 |
+
el.encoding,
|
| 292 |
+
el.encoding_aliases,
|
| 293 |
+
[
|
| 294 |
+
cp
|
| 295 |
+
for cp in el.could_be_from_charset
|
| 296 |
+
if cp != el.encoding
|
| 297 |
+
],
|
| 298 |
+
el.language,
|
| 299 |
+
el.alphabets,
|
| 300 |
+
el.bom,
|
| 301 |
+
el.percent_chaos,
|
| 302 |
+
el.percent_coherence,
|
| 303 |
+
None,
|
| 304 |
+
False,
|
| 305 |
+
)
|
| 306 |
+
)
|
| 307 |
+
|
| 308 |
+
if args.normalize is True:
|
| 309 |
+
if best_guess.encoding.startswith("utf") is True:
|
| 310 |
+
print(
|
| 311 |
+
'"{}" file does not need to be normalized, as it already came from unicode.'.format(
|
| 312 |
+
my_file.name
|
| 313 |
+
),
|
| 314 |
+
file=sys.stderr,
|
| 315 |
+
)
|
| 316 |
+
if my_file.closed is False:
|
| 317 |
+
my_file.close()
|
| 318 |
+
continue
|
| 319 |
+
|
| 320 |
+
dir_path = dirname(realpath(my_file.name))
|
| 321 |
+
file_name = basename(realpath(my_file.name))
|
| 322 |
+
|
| 323 |
+
o_: list[str] = file_name.split(".")
|
| 324 |
+
|
| 325 |
+
if args.replace is False:
|
| 326 |
+
o_.insert(-1, best_guess.encoding)
|
| 327 |
+
if my_file.closed is False:
|
| 328 |
+
my_file.close()
|
| 329 |
+
elif (
|
| 330 |
+
args.force is False
|
| 331 |
+
and query_yes_no(
|
| 332 |
+
'Are you sure to normalize "{}" by replacing it ?'.format(
|
| 333 |
+
my_file.name
|
| 334 |
+
),
|
| 335 |
+
"no",
|
| 336 |
+
)
|
| 337 |
+
is False
|
| 338 |
+
):
|
| 339 |
+
if my_file.closed is False:
|
| 340 |
+
my_file.close()
|
| 341 |
+
continue
|
| 342 |
+
|
| 343 |
+
try:
|
| 344 |
+
x_[0].unicode_path = join(dir_path, ".".join(o_))
|
| 345 |
+
|
| 346 |
+
with open(x_[0].unicode_path, "wb") as fp:
|
| 347 |
+
fp.write(best_guess.output())
|
| 348 |
+
except OSError as e:
|
| 349 |
+
print(str(e), file=sys.stderr)
|
| 350 |
+
if my_file.closed is False:
|
| 351 |
+
my_file.close()
|
| 352 |
+
return 2
|
| 353 |
+
|
| 354 |
+
if my_file.closed is False:
|
| 355 |
+
my_file.close()
|
| 356 |
+
|
| 357 |
+
if args.minimal is False:
|
| 358 |
+
print(
|
| 359 |
+
dumps(
|
| 360 |
+
[el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__,
|
| 361 |
+
ensure_ascii=True,
|
| 362 |
+
indent=4,
|
| 363 |
+
)
|
| 364 |
+
)
|
| 365 |
+
else:
|
| 366 |
+
for my_file in args.files:
|
| 367 |
+
print(
|
| 368 |
+
", ".join(
|
| 369 |
+
[
|
| 370 |
+
el.encoding or "undefined"
|
| 371 |
+
for el in x_
|
| 372 |
+
if el.path == abspath(my_file.name)
|
| 373 |
+
]
|
| 374 |
+
)
|
| 375 |
+
)
|
| 376 |
+
|
| 377 |
+
return 0
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
if __name__ == "__main__":
|
| 381 |
+
cli_detect()
|
.venv/lib/python3.13/site-packages/filelock-3.18.0.dist-info/licenses/LICENSE
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
This is free and unencumbered software released into the public domain.
|
| 2 |
+
|
| 3 |
+
Anyone is free to copy, modify, publish, use, compile, sell, or
|
| 4 |
+
distribute this software, either in source code form or as a compiled
|
| 5 |
+
binary, for any purpose, commercial or non-commercial, and by any
|
| 6 |
+
means.
|
| 7 |
+
|
| 8 |
+
In jurisdictions that recognize copyright laws, the author or authors
|
| 9 |
+
of this software dedicate any and all copyright interest in the
|
| 10 |
+
software to the public domain. We make this dedication for the benefit
|
| 11 |
+
of the public at large and to the detriment of our heirs and
|
| 12 |
+
successors. We intend this dedication to be an overt act of
|
| 13 |
+
relinquishment in perpetuity of all present and future rights to this
|
| 14 |
+
software under copyright law.
|
| 15 |
+
|
| 16 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 17 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 18 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
| 19 |
+
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
| 20 |
+
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
| 21 |
+
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
| 22 |
+
OTHER DEALINGS IN THE SOFTWARE.
|
| 23 |
+
|
| 24 |
+
For more information, please refer to <http://unlicense.org>
|
.venv/lib/python3.13/site-packages/filelock/__pycache__/_soft.cpython-313.pyc
ADDED
|
Binary file (2.55 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/filelock/__pycache__/_windows.cpython-313.pyc
ADDED
|
Binary file (3.32 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/filelock/__pycache__/asyncio.cpython-313.pyc
ADDED
|
Binary file (15.5 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__init__.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 The HuggingFace Team. All rights reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
from abc import ABC, abstractmethod
|
| 16 |
+
from argparse import _SubParsersAction
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class BaseHuggingfaceCLICommand(ABC):
|
| 20 |
+
@staticmethod
|
| 21 |
+
@abstractmethod
|
| 22 |
+
def register_subcommand(parser: _SubParsersAction):
|
| 23 |
+
raise NotImplementedError()
|
| 24 |
+
|
| 25 |
+
@abstractmethod
|
| 26 |
+
def run(self):
|
| 27 |
+
raise NotImplementedError()
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/__init__.cpython-313.pyc
ADDED
|
Binary file (1.05 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/_cli_utils.cpython-313.pyc
ADDED
|
Binary file (3.45 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/delete_cache.cpython-313.pyc
ADDED
|
Binary file (19.6 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/download.cpython-313.pyc
ADDED
|
Binary file (8.56 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/env.cpython-313.pyc
ADDED
|
Binary file (1.48 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/huggingface_cli.cpython-313.pyc
ADDED
|
Binary file (2.67 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/lfs.cpython-313.pyc
ADDED
|
Binary file (8.37 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/repo.cpython-313.pyc
ADDED
|
Binary file (6.78 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/repo_files.cpython-313.pyc
ADDED
|
Binary file (5.59 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/scan_cache.cpython-313.pyc
ADDED
|
Binary file (9.65 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/tag.cpython-313.pyc
ADDED
|
Binary file (10.4 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/upload.cpython-313.pyc
ADDED
|
Binary file (15 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/upload_large_folder.cpython-313.pyc
ADDED
|
Binary file (6.63 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/user.cpython-313.pyc
ADDED
|
Binary file (10.3 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/__pycache__/version.cpython-313.pyc
ADDED
|
Binary file (1.54 kB). View file
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/_cli_utils.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2022 The HuggingFace Team. All rights reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Contains a utility for good-looking prints."""
|
| 15 |
+
|
| 16 |
+
import os
|
| 17 |
+
from typing import List, Union
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class ANSI:
|
| 21 |
+
"""
|
| 22 |
+
Helper for en.wikipedia.org/wiki/ANSI_escape_code
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
_bold = "\u001b[1m"
|
| 26 |
+
_gray = "\u001b[90m"
|
| 27 |
+
_red = "\u001b[31m"
|
| 28 |
+
_reset = "\u001b[0m"
|
| 29 |
+
_yellow = "\u001b[33m"
|
| 30 |
+
|
| 31 |
+
@classmethod
|
| 32 |
+
def bold(cls, s: str) -> str:
|
| 33 |
+
return cls._format(s, cls._bold)
|
| 34 |
+
|
| 35 |
+
@classmethod
|
| 36 |
+
def gray(cls, s: str) -> str:
|
| 37 |
+
return cls._format(s, cls._gray)
|
| 38 |
+
|
| 39 |
+
@classmethod
|
| 40 |
+
def red(cls, s: str) -> str:
|
| 41 |
+
return cls._format(s, cls._bold + cls._red)
|
| 42 |
+
|
| 43 |
+
@classmethod
|
| 44 |
+
def yellow(cls, s: str) -> str:
|
| 45 |
+
return cls._format(s, cls._yellow)
|
| 46 |
+
|
| 47 |
+
@classmethod
|
| 48 |
+
def _format(cls, s: str, code: str) -> str:
|
| 49 |
+
if os.environ.get("NO_COLOR"):
|
| 50 |
+
# See https://no-color.org/
|
| 51 |
+
return s
|
| 52 |
+
return f"{code}{s}{cls._reset}"
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def tabulate(rows: List[List[Union[str, int]]], headers: List[str]) -> str:
|
| 56 |
+
"""
|
| 57 |
+
Inspired by:
|
| 58 |
+
|
| 59 |
+
- stackoverflow.com/a/8356620/593036
|
| 60 |
+
- stackoverflow.com/questions/9535954/printing-lists-as-tabular-data
|
| 61 |
+
"""
|
| 62 |
+
col_widths = [max(len(str(x)) for x in col) for col in zip(*rows, headers)]
|
| 63 |
+
row_format = ("{{:{}}} " * len(headers)).format(*col_widths)
|
| 64 |
+
lines = []
|
| 65 |
+
lines.append(row_format.format(*headers))
|
| 66 |
+
lines.append(row_format.format(*["-" * w for w in col_widths]))
|
| 67 |
+
for row in rows:
|
| 68 |
+
lines.append(row_format.format(*row))
|
| 69 |
+
return "\n".join(lines)
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/delete_cache.py
ADDED
|
@@ -0,0 +1,474 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# coding=utf-8
|
| 2 |
+
# Copyright 2022-present, the HuggingFace Inc. team.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
"""Contains command to delete some revisions from the HF cache directory.
|
| 16 |
+
|
| 17 |
+
Usage:
|
| 18 |
+
huggingface-cli delete-cache
|
| 19 |
+
huggingface-cli delete-cache --disable-tui
|
| 20 |
+
huggingface-cli delete-cache --dir ~/.cache/huggingface/hub
|
| 21 |
+
huggingface-cli delete-cache --sort=size
|
| 22 |
+
|
| 23 |
+
NOTE:
|
| 24 |
+
This command is based on `InquirerPy` to build the multiselect menu in the terminal.
|
| 25 |
+
This dependency has to be installed with `pip install huggingface_hub[cli]`. Since
|
| 26 |
+
we want to avoid as much as possible cross-platform issues, I chose a library that
|
| 27 |
+
is built on top of `python-prompt-toolkit` which seems to be a reference in terminal
|
| 28 |
+
GUI (actively maintained on both Unix and Windows, 7.9k stars).
|
| 29 |
+
|
| 30 |
+
For the moment, the TUI feature is in beta.
|
| 31 |
+
|
| 32 |
+
See:
|
| 33 |
+
- https://github.com/kazhala/InquirerPy
|
| 34 |
+
- https://inquirerpy.readthedocs.io/en/latest/
|
| 35 |
+
- https://github.com/prompt-toolkit/python-prompt-toolkit
|
| 36 |
+
|
| 37 |
+
Other solutions could have been:
|
| 38 |
+
- `simple_term_menu`: would be good as well for our use case but some issues suggest
|
| 39 |
+
that Windows is less supported.
|
| 40 |
+
See: https://github.com/IngoMeyer441/simple-term-menu
|
| 41 |
+
- `PyInquirer`: very similar to `InquirerPy` but older and not maintained anymore.
|
| 42 |
+
In particular, no support of Python3.10.
|
| 43 |
+
See: https://github.com/CITGuru/PyInquirer
|
| 44 |
+
- `pick` (or `pickpack`): easy to use and flexible but built on top of Python's
|
| 45 |
+
standard library `curses` that is specific to Unix (not implemented on Windows).
|
| 46 |
+
See https://github.com/wong2/pick and https://github.com/anafvana/pickpack.
|
| 47 |
+
- `inquirer`: lot of traction (700 stars) but explicitly states "experimental
|
| 48 |
+
support of Windows". Not built on top of `python-prompt-toolkit`.
|
| 49 |
+
See https://github.com/magmax/python-inquirer
|
| 50 |
+
|
| 51 |
+
TODO: add support for `huggingface-cli delete-cache aaaaaa bbbbbb cccccc (...)` ?
|
| 52 |
+
TODO: add "--keep-last" arg to delete revisions that are not on `main` ref
|
| 53 |
+
TODO: add "--filter" arg to filter repositories by name ?
|
| 54 |
+
TODO: add "--limit" arg to limit to X repos ?
|
| 55 |
+
TODO: add "-y" arg for immediate deletion ?
|
| 56 |
+
See discussions in https://github.com/huggingface/huggingface_hub/issues/1025.
|
| 57 |
+
"""
|
| 58 |
+
|
| 59 |
+
import os
|
| 60 |
+
from argparse import Namespace, _SubParsersAction
|
| 61 |
+
from functools import wraps
|
| 62 |
+
from tempfile import mkstemp
|
| 63 |
+
from typing import Any, Callable, Iterable, List, Literal, Optional, Union
|
| 64 |
+
|
| 65 |
+
from ..utils import CachedRepoInfo, CachedRevisionInfo, HFCacheInfo, scan_cache_dir
|
| 66 |
+
from . import BaseHuggingfaceCLICommand
|
| 67 |
+
from ._cli_utils import ANSI
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
try:
|
| 71 |
+
from InquirerPy import inquirer
|
| 72 |
+
from InquirerPy.base.control import Choice
|
| 73 |
+
from InquirerPy.separator import Separator
|
| 74 |
+
|
| 75 |
+
_inquirer_py_available = True
|
| 76 |
+
except ImportError:
|
| 77 |
+
_inquirer_py_available = False
|
| 78 |
+
|
| 79 |
+
SortingOption_T = Literal["alphabetical", "lastUpdated", "lastUsed", "size"]
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def require_inquirer_py(fn: Callable) -> Callable:
    """Decorator to flag methods that require `InquirerPy`."""

    # TODO: refactor this + imports in a unified pattern across codebase
    @wraps(fn)
    def _wrapper(*args, **kwargs):
        # Fail early with an actionable message when the optional TUI
        # dependency is missing; otherwise delegate untouched.
        if _inquirer_py_available:
            return fn(*args, **kwargs)
        raise ImportError(
            "The `delete-cache` command requires extra dependencies to work with"
            " the TUI.\nPlease run `pip install huggingface_hub[cli]` to install"
            " them.\nOtherwise, disable TUI using the `--disable-tui` flag."
        )

    return _wrapper
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
# Possibility for the user to cancel deletion
# Sentinel "hash": when it appears among the selected values (TUI choice or
# uncommented line in the no-TUI review file), the whole deletion is aborted.
_CANCEL_DELETION_STR = "CANCEL_DELETION"
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
class DeleteCacheCommand(BaseHuggingfaceCLICommand):
    """CLI command to interactively delete revisions from the HF cache directory.

    Registered as `huggingface-cli delete-cache`. Revisions are selected either
    through a TUI multiselect menu (default) or, with `--disable-tui`, by
    editing a temporary text file.
    """

    @staticmethod
    def register_subcommand(parser: _SubParsersAction):
        """Register the `delete-cache` subcommand and its arguments on `parser`."""
        delete_cache_parser = parser.add_parser("delete-cache", help="Delete revisions from the cache directory.")

        delete_cache_parser.add_argument(
            "--dir",
            type=str,
            default=None,
            help="cache directory (optional). Default to the default HuggingFace cache.",
        )

        delete_cache_parser.add_argument(
            "--disable-tui",
            action="store_true",
            help=(
                "Disable Terminal User Interface (TUI) mode. Useful if your"
                " platform/terminal doesn't support the multiselect menu."
            ),
        )

        delete_cache_parser.add_argument(
            "--sort",
            nargs="?",
            choices=["alphabetical", "lastUpdated", "lastUsed", "size"],
            help=(
                "Sort repositories by the specified criteria. Options: "
                "'alphabetical' (A-Z), "
                "'lastUpdated' (newest first), "
                "'lastUsed' (most recent first), "
                "'size' (largest first)."
            ),
        )

        # Instantiate this command class when the subcommand is invoked.
        delete_cache_parser.set_defaults(func=DeleteCacheCommand)

    def __init__(self, args: Namespace) -> None:
        # Parsed CLI options; `cache_dir=None` means "use the default HF cache".
        self.cache_dir: Optional[str] = args.dir
        self.disable_tui: bool = args.disable_tui
        self.sort_by: Optional[SortingOption_T] = args.sort

    def run(self):
        """Run `delete-cache` command with or without TUI."""
        # Scan cache directory
        hf_cache_info = scan_cache_dir(self.cache_dir)

        # Manual review from the user
        if self.disable_tui:
            selected_hashes = _manual_review_no_tui(hf_cache_info, preselected=[], sort_by=self.sort_by)
        else:
            selected_hashes = _manual_review_tui(hf_cache_info, preselected=[], sort_by=self.sort_by)

        # If deletion is not cancelled
        if len(selected_hashes) > 0 and _CANCEL_DELETION_STR not in selected_hashes:
            confirm_message = _get_expectations_str(hf_cache_info, selected_hashes) + " Confirm deletion ?"

            # Confirm deletion
            if self.disable_tui:
                confirmed = _ask_for_confirmation_no_tui(confirm_message)
            else:
                confirmed = _ask_for_confirmation_tui(confirm_message)

            # Deletion is confirmed
            if confirmed:
                strategy = hf_cache_info.delete_revisions(*selected_hashes)
                print("Start deletion.")
                strategy.execute()
                print(
                    f"Done. Deleted {len(strategy.repos)} repo(s) and"
                    f" {len(strategy.snapshots)} revision(s) for a total of"
                    f" {strategy.expected_freed_size_str}."
                )
                return

        # Deletion is cancelled (nothing selected, cancel sentinel chosen, or
        # confirmation declined).
        print("Deletion is cancelled. Do nothing.")
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
def _get_repo_sorting_key(repo: CachedRepoInfo, sort_by: Optional[SortingOption_T] = None):
    """Return the sort key used to order cached repos for display.

    Numeric criteria are negated so that ascending `sorted(...)` yields
    newest / most-recently-used / largest entries first.
    """
    if sort_by == "alphabetical":
        # Case-insensitive A-Z, grouped by repo type.
        return (repo.repo_type, repo.repo_id.lower())
    if sort_by == "lastUpdated":
        # Most recently modified revision first.
        return -max(rev.last_modified for rev in repo.revisions)
    if sort_by == "lastUsed":
        # Most recently accessed first.
        return -repo.last_accessed
    if sort_by == "size":
        # Largest first.
        return -repo.size_on_disk
    # Default: stable, case-sensitive (type, id) order.
    return (repo.repo_type, repo.repo_id)
|
| 193 |
+
|
| 194 |
+
|
| 195 |
+
@require_inquirer_py
def _manual_review_tui(
    hf_cache_info: HFCacheInfo,
    preselected: List[str],
    sort_by: Optional[SortingOption_T] = None,
) -> List[str]:
    """Ask the user for a manual review of the revisions to delete.

    Displays a multi-select menu in the terminal (TUI). Returns the list of
    selected revision hashes (possibly containing the cancel sentinel), or an
    empty list if the user quits with ctrl+c.
    """
    # Define multiselect list
    choices = _get_tui_choices_from_scan(
        repos=hf_cache_info.repos,
        preselected=preselected,
        sort_by=sort_by,
    )
    checkbox = inquirer.checkbox(
        message="Select revisions to delete:",
        choices=choices,  # List of revisions with some pre-selection
        cycle=False,  # No loop between top and bottom
        height=100,  # Large list if possible
        # We use the instruction to display to the user the expected effect of the
        # deletion.
        instruction=_get_expectations_str(
            hf_cache_info,
            selected_hashes=[c.value for c in choices if isinstance(c, Choice) and c.enabled],
        ),
        # We use the long instruction to show keybindings instructions to the user
        long_instruction="Press <space> to select, <enter> to validate and <ctrl+c> to quit without modification.",
        # Message that is displayed once the user validates its selection.
        transformer=lambda result: f"{len(result)} revision(s) selected.",
    )

    # Add a callback to update the information line when a revision is
    # selected/unselected
    def _update_expectations(_) -> None:
        # Hacky way to dynamically set an instruction message to the checkbox when
        # a revision hash is selected/unselected.
        # NOTE(review): relies on InquirerPy private attributes
        # (`_instruction`, `content_control`) — may break on library upgrades.
        checkbox._instruction = _get_expectations_str(
            hf_cache_info,
            selected_hashes=[choice["value"] for choice in checkbox.content_control.choices if choice["enabled"]],
        )

    checkbox.kb_func_lookup["toggle"].append({"func": _update_expectations})

    # Finally display the form to the user.
    try:
        return checkbox.execute()
    except KeyboardInterrupt:
        return []  # Quit without deletion
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
@require_inquirer_py
def _ask_for_confirmation_tui(message: str, default: bool = True) -> bool:
    """Prompt a yes/no confirmation dialog in the terminal via InquirerPy."""
    prompt = inquirer.confirm(message, default=default)
    return prompt.execute()
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
def _get_tui_choices_from_scan(
    repos: Iterable[CachedRepoInfo],
    preselected: List[str],
    sort_by: Optional[SortingOption_T] = None,
) -> List:
    """Build the `inquirer.checkbox` choice list from the scanned repos.

    Args:
        repos (*Iterable[`CachedRepoInfo`]*):
            Scanned repos from which revisions can be deleted.
        preselected (*List[`str`]*):
            Revision hashes that must start out checked.
        sort_by (*Optional[SortingOption_T]*):
            Sorting criteria. Choices: "alphabetical", "lastUpdated", "lastUsed", "size".

    Return:
        The list of choices to pass to `inquirer.checkbox`.
    """
    # The very first entry is an explicit "cancel everything" escape hatch.
    choices: List[Union[Choice, Separator]] = [
        Choice(
            _CANCEL_DELETION_STR,
            name="None of the following (if selected, nothing will be deleted).",
            enabled=False,
        )
    ]

    # Order repos according to the user-requested criteria.
    for repo in sorted(repos, key=lambda repo: _get_repo_sorting_key(repo, sort_by)):
        # A separator acts as a non-selectable section header for the repo.
        choices.append(
            Separator(
                f"\n{repo.repo_type.capitalize()} {repo.repo_id} ({repo.size_on_disk_str},"
                f" used {repo.last_accessed_str})"
            )
        )
        # One selectable entry per revision, oldest-modified first.
        for revision in sorted(repo.revisions, key=_revision_sorting_order):
            choices.append(
                Choice(
                    revision.commit_hash,
                    name=(
                        f"{revision.commit_hash[:8]}:"
                        f" {', '.join(sorted(revision.refs)) or '(detached)'} #"
                        f" modified {revision.last_modified_str}"
                    ),
                    enabled=revision.commit_hash in preselected,
                )
            )

    return choices
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
def _manual_review_no_tui(
    hf_cache_info: HFCacheInfo,
    preselected: List[str],
    sort_by: Optional[SortingOption_T] = None,
) -> List[str]:
    """Ask the user for a manual review of the revisions to delete.

    Used when TUI is disabled. Manual review happens in a separate tmp file that the
    user can manually edit. The file layout is load-bearing: it is parsed back by
    `_read_manual_review_tmp_file` ('#' starts a comment, one hash per line).
    """
    # 1. Generate temporary file with delete commands.
    fd, tmp_path = mkstemp(suffix=".txt")  # suffix to make it easier to find by editors
    os.close(fd)  # only the path is needed; the file is (re)opened below

    lines = []

    sorted_repos = sorted(hf_cache_info.repos, key=lambda repo: _get_repo_sorting_key(repo, sort_by))

    for repo in sorted_repos:
        lines.append(
            f"\n# {repo.repo_type.capitalize()} {repo.repo_id} ({repo.size_on_disk_str},"
            f" used {repo.last_accessed_str})"
        )
        for revision in sorted(repo.revisions, key=_revision_sorting_order):
            lines.append(
                # Deselect by prepending a '#'
                f"{'' if revision.commit_hash in preselected else '#'} "
                f" {revision.commit_hash} # Refs:"
                # Print `refs` as comment on same line
                f" {', '.join(sorted(revision.refs)) or '(detached)'} # modified"
                # Print `last_modified` as comment on same line
                f" {revision.last_modified_str}"
            )

    with open(tmp_path, "w") as f:
        f.write(_MANUAL_REVIEW_NO_TUI_INSTRUCTIONS)
        f.write("\n".join(lines))

    # 2. Prompt instructions to user.
    instructions = f"""
    TUI is disabled. In order to select which revisions you want to delete, please edit
    the following file using the text editor of your choice. Instructions for manual
    editing are located at the beginning of the file. Edit the file, save it and confirm
    to continue.
    File to edit: {ANSI.bold(tmp_path)}
    """
    print("\n".join(line.strip() for line in instructions.strip().split("\n")))

    # 3. Wait for user confirmation. The file is re-parsed on every attempt so
    # edits made between attempts are picked up.
    while True:
        selected_hashes = _read_manual_review_tmp_file(tmp_path)
        if _ask_for_confirmation_no_tui(
            _get_expectations_str(hf_cache_info, selected_hashes) + " Continue ?",
            default=False,
        ):
            break

    # 4. Return selected_hashes sorted to maintain stable order
    os.remove(tmp_path)
    return sorted(selected_hashes)  # Sort to maintain stable order
|
| 371 |
+
|
| 372 |
+
|
| 373 |
+
def _ask_for_confirmation_no_tui(message: str, default: bool = True) -> bool:
    """Ask for confirmation using pure-python.

    Re-prompts until the answer is recognized; an empty answer returns `default`.
    """
    yes_choices = ("y", "yes", "1")
    no_choices = ("n", "no", "0")
    empty = ""
    valid_answers = yes_choices + no_choices + (empty,)
    # Capitalize the default option in the prompt suffix.
    suffix = " (Y/n) " if default else " (y/N) "
    while True:
        answer = input(message + suffix).lower()
        if answer == empty:
            return default
        if answer in yes_choices:
            return True
        if answer in no_choices:
            return False
        print(f"Invalid input. Must be one of {valid_answers}")
|
| 389 |
+
|
| 390 |
+
|
| 391 |
+
def _get_expectations_str(hf_cache_info: HFCacheInfo, selected_hashes: List[str]) -> str:
    """Summarize how many revisions are selected and how much space would be freed.

    Example:
    ```
    >>> _get_expectations_str(hf_cache_info, selected_hashes)
    '7 revisions selected counting for 4.3G.'
    ```
    """
    # The cancel sentinel short-circuits everything: nothing gets deleted.
    if _CANCEL_DELETION_STR in selected_hashes:
        return "Nothing will be deleted."
    plan = hf_cache_info.delete_revisions(*selected_hashes)
    return f"{len(selected_hashes)} revisions selected counting for {plan.expected_freed_size_str}."
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
def _read_manual_review_tmp_file(tmp_path: str) -> List[str]:
    """Parse the manually reviewed file and return the selected revision hashes.

    A line contributes a hash when, once stripped, it is non-empty and not
    commented out; anything after a `#` on the line is ignored.

    Example file content:
    ```txt
    # This is the tmp file content
    ###

    # Commented out line
    123456789 # revision hash

    # Something else
    # a_newer_hash # 2 days ago
    an_older_hash # 3 days ago
    ```

    ```py
    >>> _read_manual_review_tmp_file(tmp_path)
    ['123456789', 'an_older_hash']
    ```
    """
    with open(tmp_path) as f:
        raw = f.read()

    hashes = []
    for line in raw.split("\n"):
        candidate = line.strip()
        # Skip fully commented-out lines.
        if candidate.startswith("#"):
            continue
        # Keep only the part before any inline comment.
        candidate = candidate.split("#")[0].strip()
        if candidate:
            hashes.append(candidate)
    return hashes
|
| 441 |
+
|
| 442 |
+
|
| 443 |
+
_MANUAL_REVIEW_NO_TUI_INSTRUCTIONS = f"""
|
| 444 |
+
# INSTRUCTIONS
|
| 445 |
+
# ------------
|
| 446 |
+
# This is a temporary file created by running `huggingface-cli delete-cache` with the
|
| 447 |
+
# `--disable-tui` option. It contains a set of revisions that can be deleted from your
|
| 448 |
+
# local cache directory.
|
| 449 |
+
#
|
| 450 |
+
# Please manually review the revisions you want to delete:
|
| 451 |
+
# - Revision hashes can be commented out with '#'.
|
| 452 |
+
# - Only non-commented revisions in this file will be deleted.
|
| 453 |
+
# - Revision hashes that are removed from this file are ignored as well.
|
| 454 |
+
# - If `{_CANCEL_DELETION_STR}` line is uncommented, the all cache deletion is cancelled and
|
| 455 |
+
# no changes will be applied.
|
| 456 |
+
#
|
| 457 |
+
# Once you've manually reviewed this file, please confirm deletion in the terminal. This
|
| 458 |
+
# file will be automatically removed once done.
|
| 459 |
+
# ------------
|
| 460 |
+
|
| 461 |
+
# KILL SWITCH
|
| 462 |
+
# ------------
|
| 463 |
+
# Un-comment following line to completely cancel the deletion process
|
| 464 |
+
# {_CANCEL_DELETION_STR}
|
| 465 |
+
# ------------
|
| 466 |
+
|
| 467 |
+
# REVISIONS
|
| 468 |
+
# ------------
|
| 469 |
+
""".strip()
|
| 470 |
+
|
| 471 |
+
|
| 472 |
+
def _revision_sorting_order(revision: CachedRevisionInfo) -> Any:
    """Sort key for revisions within a repo: oldest `last_modified` first."""
    return revision.last_modified
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/download.py
ADDED
|
@@ -0,0 +1,200 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# coding=utf-8
|
| 2 |
+
# Copyright 2023-present, the HuggingFace Inc. team.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
"""Contains command to download files from the Hub with the CLI.
|
| 16 |
+
|
| 17 |
+
Usage:
|
| 18 |
+
huggingface-cli download --help
|
| 19 |
+
|
| 20 |
+
# Download file
|
| 21 |
+
huggingface-cli download gpt2 config.json
|
| 22 |
+
|
| 23 |
+
# Download entire repo
|
| 24 |
+
huggingface-cli download fffiloni/zeroscope --repo-type=space --revision=refs/pr/78
|
| 25 |
+
|
| 26 |
+
# Download repo with filters
|
| 27 |
+
huggingface-cli download gpt2 --include="*.safetensors"
|
| 28 |
+
|
| 29 |
+
# Download with token
|
| 30 |
+
huggingface-cli download Wauplin/private-model --token=hf_***
|
| 31 |
+
|
| 32 |
+
# Download quietly (no progress bar, no warnings, only the returned path)
|
| 33 |
+
huggingface-cli download gpt2 config.json --quiet
|
| 34 |
+
|
| 35 |
+
# Download to local dir
|
| 36 |
+
huggingface-cli download gpt2 --local-dir=./models/gpt2
|
| 37 |
+
"""
|
| 38 |
+
|
| 39 |
+
import warnings
|
| 40 |
+
from argparse import Namespace, _SubParsersAction
|
| 41 |
+
from typing import List, Optional
|
| 42 |
+
|
| 43 |
+
from huggingface_hub import logging
|
| 44 |
+
from huggingface_hub._snapshot_download import snapshot_download
|
| 45 |
+
from huggingface_hub.commands import BaseHuggingfaceCLICommand
|
| 46 |
+
from huggingface_hub.file_download import hf_hub_download
|
| 47 |
+
from huggingface_hub.utils import disable_progress_bars, enable_progress_bars
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
logger = logging.get_logger(__name__)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class DownloadCommand(BaseHuggingfaceCLICommand):
    """CLI command to download files or whole repos from the Hub.

    Registered as `huggingface-cli download`. Dispatches to `hf_hub_download`
    for a single file and to `snapshot_download` otherwise.
    """

    @staticmethod
    def register_subcommand(parser: _SubParsersAction):
        """Register the `download` subcommand and its arguments on `parser`."""
        download_parser = parser.add_parser("download", help="Download files from the Hub")
        download_parser.add_argument(
            "repo_id", type=str, help="ID of the repo to download from (e.g. `username/repo-name`)."
        )
        download_parser.add_argument(
            "filenames", type=str, nargs="*", help="Files to download (e.g. `config.json`, `data/metadata.jsonl`)."
        )
        download_parser.add_argument(
            "--repo-type",
            choices=["model", "dataset", "space"],
            default="model",
            help="Type of repo to download from (defaults to 'model').",
        )
        download_parser.add_argument(
            "--revision",
            type=str,
            help="An optional Git revision id which can be a branch name, a tag, or a commit hash.",
        )
        download_parser.add_argument(
            "--include", nargs="*", type=str, help="Glob patterns to match files to download."
        )
        download_parser.add_argument(
            "--exclude", nargs="*", type=str, help="Glob patterns to exclude from files to download."
        )
        download_parser.add_argument(
            "--cache-dir", type=str, help="Path to the directory where to save the downloaded files."
        )
        download_parser.add_argument(
            "--local-dir",
            type=str,
            help=(
                "If set, the downloaded file will be placed under this directory. Check out"
                " https://huggingface.co/docs/huggingface_hub/guides/download#download-files-to-local-folder for more"
                " details."
            ),
        )
        # Deprecated flag kept only so old invocations still parse.
        download_parser.add_argument(
            "--local-dir-use-symlinks",
            choices=["auto", "True", "False"],
            help=("Deprecated and ignored. Downloading to a local directory does not use symlinks anymore."),
        )
        download_parser.add_argument(
            "--force-download",
            action="store_true",
            help="If True, the files will be downloaded even if they are already cached.",
        )
        # Deprecated flag kept only so old invocations still parse.
        download_parser.add_argument(
            "--resume-download",
            action="store_true",
            help="Deprecated and ignored. Downloading a file to local dir always attempts to resume previously interrupted downloads (unless hf-transfer is enabled).",
        )
        download_parser.add_argument(
            "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
        )
        download_parser.add_argument(
            "--quiet",
            action="store_true",
            help="If True, progress bars are disabled and only the path to the download files is printed.",
        )
        download_parser.add_argument(
            "--max-workers",
            type=int,
            default=8,
            help="Maximum number of workers to use for downloading files. Default is 8.",
        )
        # Instantiate this command class when the subcommand is invoked.
        download_parser.set_defaults(func=DownloadCommand)

    def __init__(self, args: Namespace) -> None:
        self.token = args.token
        self.repo_id: str = args.repo_id
        self.filenames: List[str] = args.filenames
        self.repo_type: str = args.repo_type
        self.revision: Optional[str] = args.revision
        self.include: Optional[List[str]] = args.include
        self.exclude: Optional[List[str]] = args.exclude
        self.cache_dir: Optional[str] = args.cache_dir
        self.local_dir: Optional[str] = args.local_dir
        self.force_download: bool = args.force_download
        # `store_true` yields False when absent; coerce to None so the
        # download helpers apply their own default behavior.
        self.resume_download: Optional[bool] = args.resume_download or None
        self.quiet: bool = args.quiet
        self.max_workers: int = args.max_workers

        if args.local_dir_use_symlinks is not None:
            warnings.warn(
                "Ignoring --local-dir-use-symlinks. Downloading to a local directory does not use symlinks anymore.",
                FutureWarning,
            )

    def run(self) -> None:
        """Execute the download; with `--quiet`, print only the resulting path."""
        if self.quiet:
            disable_progress_bars()
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                print(self._download())  # Print path to downloaded files
            enable_progress_bars()
        else:
            logging.set_verbosity_info()
            print(self._download())  # Print path to downloaded files
            logging.set_verbosity_warning()

    def _download(self) -> str:
        """Download the requested file(s) and return the local path."""
        # Warn user if patterns are ignored
        if len(self.filenames) > 0:
            if self.include is not None and len(self.include) > 0:
                warnings.warn("Ignoring `--include` since filenames have being explicitly set.")
            if self.exclude is not None and len(self.exclude) > 0:
                warnings.warn("Ignoring `--exclude` since filenames have being explicitly set.")

        # Single file to download: use `hf_hub_download`
        if len(self.filenames) == 1:
            return hf_hub_download(
                repo_id=self.repo_id,
                repo_type=self.repo_type,
                revision=self.revision,
                filename=self.filenames[0],
                cache_dir=self.cache_dir,
                resume_download=self.resume_download,
                force_download=self.force_download,
                token=self.token,
                local_dir=self.local_dir,
                library_name="huggingface-cli",
            )

        # Otherwise: use `snapshot_download` to ensure all files comes from same revision
        elif len(self.filenames) == 0:
            allow_patterns = self.include
            ignore_patterns = self.exclude
        else:
            allow_patterns = self.filenames
            ignore_patterns = None

        return snapshot_download(
            repo_id=self.repo_id,
            repo_type=self.repo_type,
            revision=self.revision,
            allow_patterns=allow_patterns,
            ignore_patterns=ignore_patterns,
            resume_download=self.resume_download,
            force_download=self.force_download,
            cache_dir=self.cache_dir,
            token=self.token,
            local_dir=self.local_dir,
            library_name="huggingface-cli",
            max_workers=self.max_workers,
        )
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/env.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2022 The HuggingFace Team. All rights reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Contains command to print information about the environment.
|
| 15 |
+
|
| 16 |
+
Usage:
|
| 17 |
+
huggingface-cli env
|
| 18 |
+
"""
|
| 19 |
+
|
| 20 |
+
from argparse import _SubParsersAction
|
| 21 |
+
|
| 22 |
+
from ..utils import dump_environment_info
|
| 23 |
+
from . import BaseHuggingfaceCLICommand
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class EnvironmentCommand(BaseHuggingfaceCLICommand):
|
| 27 |
+
def __init__(self, args):
|
| 28 |
+
self.args = args
|
| 29 |
+
|
| 30 |
+
@staticmethod
|
| 31 |
+
def register_subcommand(parser: _SubParsersAction):
|
| 32 |
+
env_parser = parser.add_parser("env", help="Print information about the environment.")
|
| 33 |
+
env_parser.set_defaults(func=EnvironmentCommand)
|
| 34 |
+
|
| 35 |
+
def run(self) -> None:
|
| 36 |
+
dump_environment_info()
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/huggingface_cli.py
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 The HuggingFace Team. All rights reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
from argparse import ArgumentParser
|
| 16 |
+
|
| 17 |
+
from huggingface_hub.commands.delete_cache import DeleteCacheCommand
|
| 18 |
+
from huggingface_hub.commands.download import DownloadCommand
|
| 19 |
+
from huggingface_hub.commands.env import EnvironmentCommand
|
| 20 |
+
from huggingface_hub.commands.lfs import LfsCommands
|
| 21 |
+
from huggingface_hub.commands.repo import RepoCommands
|
| 22 |
+
from huggingface_hub.commands.repo_files import RepoFilesCommand
|
| 23 |
+
from huggingface_hub.commands.scan_cache import ScanCacheCommand
|
| 24 |
+
from huggingface_hub.commands.tag import TagCommands
|
| 25 |
+
from huggingface_hub.commands.upload import UploadCommand
|
| 26 |
+
from huggingface_hub.commands.upload_large_folder import UploadLargeFolderCommand
|
| 27 |
+
from huggingface_hub.commands.user import UserCommands
|
| 28 |
+
from huggingface_hub.commands.version import VersionCommand
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def main():
|
| 32 |
+
parser = ArgumentParser("huggingface-cli", usage="huggingface-cli <command> [<args>]")
|
| 33 |
+
commands_parser = parser.add_subparsers(help="huggingface-cli command helpers")
|
| 34 |
+
|
| 35 |
+
# Register commands
|
| 36 |
+
DownloadCommand.register_subcommand(commands_parser)
|
| 37 |
+
UploadCommand.register_subcommand(commands_parser)
|
| 38 |
+
RepoFilesCommand.register_subcommand(commands_parser)
|
| 39 |
+
EnvironmentCommand.register_subcommand(commands_parser)
|
| 40 |
+
UserCommands.register_subcommand(commands_parser)
|
| 41 |
+
RepoCommands.register_subcommand(commands_parser)
|
| 42 |
+
LfsCommands.register_subcommand(commands_parser)
|
| 43 |
+
ScanCacheCommand.register_subcommand(commands_parser)
|
| 44 |
+
DeleteCacheCommand.register_subcommand(commands_parser)
|
| 45 |
+
TagCommands.register_subcommand(commands_parser)
|
| 46 |
+
VersionCommand.register_subcommand(commands_parser)
|
| 47 |
+
|
| 48 |
+
# Experimental
|
| 49 |
+
UploadLargeFolderCommand.register_subcommand(commands_parser)
|
| 50 |
+
|
| 51 |
+
# Let's go
|
| 52 |
+
args = parser.parse_args()
|
| 53 |
+
if not hasattr(args, "func"):
|
| 54 |
+
parser.print_help()
|
| 55 |
+
exit(1)
|
| 56 |
+
|
| 57 |
+
# Run
|
| 58 |
+
service = args.func(args)
|
| 59 |
+
service.run()
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
if __name__ == "__main__":
|
| 63 |
+
main()
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/lfs.py
ADDED
|
@@ -0,0 +1,200 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Implementation of a custom transfer agent for the transfer type "multipart" for
|
| 3 |
+
git-lfs.
|
| 4 |
+
|
| 5 |
+
Inspired by:
|
| 6 |
+
github.com/cbartz/git-lfs-swift-transfer-agent/blob/master/git_lfs_swift_transfer.py
|
| 7 |
+
|
| 8 |
+
Spec is: github.com/git-lfs/git-lfs/blob/master/docs/custom-transfers.md
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
To launch debugger while developing:
|
| 12 |
+
|
| 13 |
+
``` [lfs "customtransfer.multipart"]
|
| 14 |
+
path = /path/to/huggingface_hub/.env/bin/python args = -m debugpy --listen 5678
|
| 15 |
+
--wait-for-client
|
| 16 |
+
/path/to/huggingface_hub/src/huggingface_hub/commands/huggingface_cli.py
|
| 17 |
+
lfs-multipart-upload ```"""
|
| 18 |
+
|
| 19 |
+
import json
|
| 20 |
+
import os
|
| 21 |
+
import subprocess
|
| 22 |
+
import sys
|
| 23 |
+
from argparse import _SubParsersAction
|
| 24 |
+
from typing import Dict, List, Optional
|
| 25 |
+
|
| 26 |
+
from huggingface_hub.commands import BaseHuggingfaceCLICommand
|
| 27 |
+
from huggingface_hub.lfs import LFS_MULTIPART_UPLOAD_COMMAND
|
| 28 |
+
|
| 29 |
+
from ..utils import get_session, hf_raise_for_status, logging
|
| 30 |
+
from ..utils._lfs import SliceFileObj
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
logger = logging.get_logger(__name__)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class LfsCommands(BaseHuggingfaceCLICommand):
|
| 37 |
+
"""
|
| 38 |
+
Implementation of a custom transfer agent for the transfer type "multipart"
|
| 39 |
+
for git-lfs. This lets users upload large files >5GB 🔥. Spec for LFS custom
|
| 40 |
+
transfer agent is:
|
| 41 |
+
https://github.com/git-lfs/git-lfs/blob/master/docs/custom-transfers.md
|
| 42 |
+
|
| 43 |
+
This introduces two commands to the CLI:
|
| 44 |
+
|
| 45 |
+
1. $ huggingface-cli lfs-enable-largefiles
|
| 46 |
+
|
| 47 |
+
This should be executed once for each model repo that contains a model file
|
| 48 |
+
>5GB. It's documented in the error message you get if you just try to git
|
| 49 |
+
push a 5GB file without having enabled it before.
|
| 50 |
+
|
| 51 |
+
2. $ huggingface-cli lfs-multipart-upload
|
| 52 |
+
|
| 53 |
+
This command is called by lfs directly and is not meant to be called by the
|
| 54 |
+
user.
|
| 55 |
+
"""
|
| 56 |
+
|
| 57 |
+
@staticmethod
|
| 58 |
+
def register_subcommand(parser: _SubParsersAction):
|
| 59 |
+
enable_parser = parser.add_parser(
|
| 60 |
+
"lfs-enable-largefiles", help="Configure your repository to enable upload of files > 5GB."
|
| 61 |
+
)
|
| 62 |
+
enable_parser.add_argument("path", type=str, help="Local path to repository you want to configure.")
|
| 63 |
+
enable_parser.set_defaults(func=lambda args: LfsEnableCommand(args))
|
| 64 |
+
|
| 65 |
+
# Command will get called by git-lfs, do not call it directly.
|
| 66 |
+
upload_parser = parser.add_parser(LFS_MULTIPART_UPLOAD_COMMAND, add_help=False)
|
| 67 |
+
upload_parser.set_defaults(func=lambda args: LfsUploadCommand(args))
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
class LfsEnableCommand:
|
| 71 |
+
def __init__(self, args):
|
| 72 |
+
self.args = args
|
| 73 |
+
|
| 74 |
+
def run(self):
|
| 75 |
+
local_path = os.path.abspath(self.args.path)
|
| 76 |
+
if not os.path.isdir(local_path):
|
| 77 |
+
print("This does not look like a valid git repo.")
|
| 78 |
+
exit(1)
|
| 79 |
+
subprocess.run(
|
| 80 |
+
"git config lfs.customtransfer.multipart.path huggingface-cli".split(),
|
| 81 |
+
check=True,
|
| 82 |
+
cwd=local_path,
|
| 83 |
+
)
|
| 84 |
+
subprocess.run(
|
| 85 |
+
f"git config lfs.customtransfer.multipart.args {LFS_MULTIPART_UPLOAD_COMMAND}".split(),
|
| 86 |
+
check=True,
|
| 87 |
+
cwd=local_path,
|
| 88 |
+
)
|
| 89 |
+
print("Local repo set up for largefiles")
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
def write_msg(msg: Dict):
|
| 93 |
+
"""Write out the message in Line delimited JSON."""
|
| 94 |
+
msg_str = json.dumps(msg) + "\n"
|
| 95 |
+
sys.stdout.write(msg_str)
|
| 96 |
+
sys.stdout.flush()
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def read_msg() -> Optional[Dict]:
|
| 100 |
+
"""Read Line delimited JSON from stdin."""
|
| 101 |
+
msg = json.loads(sys.stdin.readline().strip())
|
| 102 |
+
|
| 103 |
+
if "terminate" in (msg.get("type"), msg.get("event")):
|
| 104 |
+
# terminate message received
|
| 105 |
+
return None
|
| 106 |
+
|
| 107 |
+
if msg.get("event") not in ("download", "upload"):
|
| 108 |
+
logger.critical("Received unexpected message")
|
| 109 |
+
sys.exit(1)
|
| 110 |
+
|
| 111 |
+
return msg
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
class LfsUploadCommand:
|
| 115 |
+
def __init__(self, args) -> None:
|
| 116 |
+
self.args = args
|
| 117 |
+
|
| 118 |
+
def run(self) -> None:
|
| 119 |
+
# Immediately after invoking a custom transfer process, git-lfs
|
| 120 |
+
# sends initiation data to the process over stdin.
|
| 121 |
+
# This tells the process useful information about the configuration.
|
| 122 |
+
init_msg = json.loads(sys.stdin.readline().strip())
|
| 123 |
+
if not (init_msg.get("event") == "init" and init_msg.get("operation") == "upload"):
|
| 124 |
+
write_msg({"error": {"code": 32, "message": "Wrong lfs init operation"}})
|
| 125 |
+
sys.exit(1)
|
| 126 |
+
|
| 127 |
+
# The transfer process should use the information it needs from the
|
| 128 |
+
# initiation structure, and also perform any one-off setup tasks it
|
| 129 |
+
# needs to do. It should then respond on stdout with a simple empty
|
| 130 |
+
# confirmation structure, as follows:
|
| 131 |
+
write_msg({})
|
| 132 |
+
|
| 133 |
+
# After the initiation exchange, git-lfs will send any number of
|
| 134 |
+
# transfer requests to the stdin of the transfer process, in a serial sequence.
|
| 135 |
+
while True:
|
| 136 |
+
msg = read_msg()
|
| 137 |
+
if msg is None:
|
| 138 |
+
# When all transfers have been processed, git-lfs will send
|
| 139 |
+
# a terminate event to the stdin of the transfer process.
|
| 140 |
+
# On receiving this message the transfer process should
|
| 141 |
+
# clean up and terminate. No response is expected.
|
| 142 |
+
sys.exit(0)
|
| 143 |
+
|
| 144 |
+
oid = msg["oid"]
|
| 145 |
+
filepath = msg["path"]
|
| 146 |
+
completion_url = msg["action"]["href"]
|
| 147 |
+
header = msg["action"]["header"]
|
| 148 |
+
chunk_size = int(header.pop("chunk_size"))
|
| 149 |
+
presigned_urls: List[str] = list(header.values())
|
| 150 |
+
|
| 151 |
+
# Send a "started" progress event to allow other workers to start.
|
| 152 |
+
# Otherwise they're delayed until first "progress" event is reported,
|
| 153 |
+
# i.e. after the first 5GB by default (!)
|
| 154 |
+
write_msg(
|
| 155 |
+
{
|
| 156 |
+
"event": "progress",
|
| 157 |
+
"oid": oid,
|
| 158 |
+
"bytesSoFar": 1,
|
| 159 |
+
"bytesSinceLast": 0,
|
| 160 |
+
}
|
| 161 |
+
)
|
| 162 |
+
|
| 163 |
+
parts = []
|
| 164 |
+
with open(filepath, "rb") as file:
|
| 165 |
+
for i, presigned_url in enumerate(presigned_urls):
|
| 166 |
+
with SliceFileObj(
|
| 167 |
+
file,
|
| 168 |
+
seek_from=i * chunk_size,
|
| 169 |
+
read_limit=chunk_size,
|
| 170 |
+
) as data:
|
| 171 |
+
r = get_session().put(presigned_url, data=data)
|
| 172 |
+
hf_raise_for_status(r)
|
| 173 |
+
parts.append(
|
| 174 |
+
{
|
| 175 |
+
"etag": r.headers.get("etag"),
|
| 176 |
+
"partNumber": i + 1,
|
| 177 |
+
}
|
| 178 |
+
)
|
| 179 |
+
# In order to support progress reporting while data is uploading / downloading,
|
| 180 |
+
# the transfer process should post messages to stdout
|
| 181 |
+
write_msg(
|
| 182 |
+
{
|
| 183 |
+
"event": "progress",
|
| 184 |
+
"oid": oid,
|
| 185 |
+
"bytesSoFar": (i + 1) * chunk_size,
|
| 186 |
+
"bytesSinceLast": chunk_size,
|
| 187 |
+
}
|
| 188 |
+
)
|
| 189 |
+
# Not precise but that's ok.
|
| 190 |
+
|
| 191 |
+
r = get_session().post(
|
| 192 |
+
completion_url,
|
| 193 |
+
json={
|
| 194 |
+
"oid": oid,
|
| 195 |
+
"parts": parts,
|
| 196 |
+
},
|
| 197 |
+
)
|
| 198 |
+
hf_raise_for_status(r)
|
| 199 |
+
|
| 200 |
+
write_msg({"event": "complete", "oid": oid})
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/repo.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2025 The HuggingFace Team. All rights reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Contains commands to interact with repositories on the Hugging Face Hub.
|
| 15 |
+
|
| 16 |
+
Usage:
|
| 17 |
+
# create a new dataset repo on the Hub
|
| 18 |
+
huggingface-cli repo create my-cool-dataset --repo-type=dataset
|
| 19 |
+
|
| 20 |
+
# create a private model repo on the Hub
|
| 21 |
+
huggingface-cli repo create my-cool-model --private
|
| 22 |
+
"""
|
| 23 |
+
|
| 24 |
+
import argparse
|
| 25 |
+
from argparse import _SubParsersAction
|
| 26 |
+
from typing import Optional
|
| 27 |
+
|
| 28 |
+
from huggingface_hub.commands import BaseHuggingfaceCLICommand
|
| 29 |
+
from huggingface_hub.commands._cli_utils import ANSI
|
| 30 |
+
from huggingface_hub.constants import SPACES_SDK_TYPES
|
| 31 |
+
from huggingface_hub.hf_api import HfApi
|
| 32 |
+
from huggingface_hub.utils import logging
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
logger = logging.get_logger(__name__)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class RepoCommands(BaseHuggingfaceCLICommand):
|
| 39 |
+
@staticmethod
|
| 40 |
+
def register_subcommand(parser: _SubParsersAction):
|
| 41 |
+
repo_parser = parser.add_parser("repo", help="{create} Commands to interact with your huggingface.co repos.")
|
| 42 |
+
repo_subparsers = repo_parser.add_subparsers(help="huggingface.co repos related commands")
|
| 43 |
+
repo_create_parser = repo_subparsers.add_parser("create", help="Create a new repo on huggingface.co")
|
| 44 |
+
repo_create_parser.add_argument(
|
| 45 |
+
"repo_id",
|
| 46 |
+
type=str,
|
| 47 |
+
help="The ID of the repo to create to (e.g. `username/repo-name`). The username is optional and will be set to your username if not provided.",
|
| 48 |
+
)
|
| 49 |
+
repo_create_parser.add_argument(
|
| 50 |
+
"--repo-type",
|
| 51 |
+
type=str,
|
| 52 |
+
help='Optional: set to "dataset" or "space" if creating a dataset or space, default is model.',
|
| 53 |
+
)
|
| 54 |
+
repo_create_parser.add_argument(
|
| 55 |
+
"--space_sdk",
|
| 56 |
+
type=str,
|
| 57 |
+
help='Optional: Hugging Face Spaces SDK type. Required when --type is set to "space".',
|
| 58 |
+
choices=SPACES_SDK_TYPES,
|
| 59 |
+
)
|
| 60 |
+
repo_create_parser.add_argument(
|
| 61 |
+
"--private",
|
| 62 |
+
action="store_true",
|
| 63 |
+
help="Whether to create a private repository. Defaults to public unless the organization's default is private.",
|
| 64 |
+
)
|
| 65 |
+
repo_create_parser.add_argument(
|
| 66 |
+
"--token",
|
| 67 |
+
type=str,
|
| 68 |
+
help="Hugging Face token. Will default to the locally saved token if not provided.",
|
| 69 |
+
)
|
| 70 |
+
repo_create_parser.add_argument(
|
| 71 |
+
"--exist-ok",
|
| 72 |
+
action="store_true",
|
| 73 |
+
help="Do not raise an error if repo already exists.",
|
| 74 |
+
)
|
| 75 |
+
repo_create_parser.add_argument(
|
| 76 |
+
"--resource-group-id",
|
| 77 |
+
type=str,
|
| 78 |
+
help="Resource group in which to create the repo. Resource groups is only available for Enterprise Hub organizations.",
|
| 79 |
+
)
|
| 80 |
+
repo_create_parser.add_argument(
|
| 81 |
+
"--type",
|
| 82 |
+
type=str,
|
| 83 |
+
help="[Deprecated]: use --repo-type instead.",
|
| 84 |
+
)
|
| 85 |
+
repo_create_parser.add_argument(
|
| 86 |
+
"-y",
|
| 87 |
+
"--yes",
|
| 88 |
+
action="store_true",
|
| 89 |
+
help="[Deprecated] no effect.",
|
| 90 |
+
)
|
| 91 |
+
repo_create_parser.add_argument(
|
| 92 |
+
"--organization", type=str, help="[Deprecated] Pass the organization namespace directly in the repo_id."
|
| 93 |
+
)
|
| 94 |
+
repo_create_parser.set_defaults(func=lambda args: RepoCreateCommand(args))
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
class RepoCreateCommand:
|
| 98 |
+
def __init__(self, args: argparse.Namespace):
|
| 99 |
+
self.repo_id: str = args.repo_id
|
| 100 |
+
self.repo_type: Optional[str] = args.repo_type or args.type
|
| 101 |
+
self.space_sdk: Optional[str] = args.space_sdk
|
| 102 |
+
self.organization: Optional[str] = args.organization
|
| 103 |
+
self.yes: bool = args.yes
|
| 104 |
+
self.private: bool = args.private
|
| 105 |
+
self.token: Optional[str] = args.token
|
| 106 |
+
self.exist_ok: bool = args.exist_ok
|
| 107 |
+
self.resource_group_id: Optional[str] = args.resource_group_id
|
| 108 |
+
|
| 109 |
+
if args.type is not None:
|
| 110 |
+
print(
|
| 111 |
+
ANSI.yellow(
|
| 112 |
+
"The --type argument is deprecated and will be removed in a future version. Use --repo-type instead."
|
| 113 |
+
)
|
| 114 |
+
)
|
| 115 |
+
if self.organization is not None:
|
| 116 |
+
print(
|
| 117 |
+
ANSI.yellow(
|
| 118 |
+
"The --organization argument is deprecated and will be removed in a future version. Pass the organization namespace directly in the repo_id."
|
| 119 |
+
)
|
| 120 |
+
)
|
| 121 |
+
if self.yes:
|
| 122 |
+
print(
|
| 123 |
+
ANSI.yellow(
|
| 124 |
+
"The --yes argument is deprecated and will be removed in a future version. It does not have any effect."
|
| 125 |
+
)
|
| 126 |
+
)
|
| 127 |
+
|
| 128 |
+
self._api = HfApi()
|
| 129 |
+
|
| 130 |
+
def run(self):
|
| 131 |
+
if self.organization is not None:
|
| 132 |
+
if "/" in self.repo_id:
|
| 133 |
+
print(ANSI.red("You cannot pass both --organization and a repo_id with a namespace."))
|
| 134 |
+
exit(1)
|
| 135 |
+
self.repo_id = f"{self.organization}/{self.repo_id}"
|
| 136 |
+
|
| 137 |
+
repo_url = self._api.create_repo(
|
| 138 |
+
repo_id=self.repo_id,
|
| 139 |
+
repo_type=self.repo_type,
|
| 140 |
+
private=self.private,
|
| 141 |
+
token=self.token,
|
| 142 |
+
exist_ok=self.exist_ok,
|
| 143 |
+
resource_group_id=self.resource_group_id,
|
| 144 |
+
space_sdk=self.space_sdk,
|
| 145 |
+
)
|
| 146 |
+
print(f"Successfully created {ANSI.bold(repo_url.repo_id)} on the Hub.")
|
| 147 |
+
print(f"Your repo is now available at {ANSI.bold(repo_url)}")
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/repo_files.py
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# coding=utf-8
|
| 2 |
+
# Copyright 2023-present, the HuggingFace Inc. team.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
"""Contains command to update or delete files in a repository using the CLI.
|
| 16 |
+
|
| 17 |
+
Usage:
|
| 18 |
+
# delete all
|
| 19 |
+
huggingface-cli repo-files <repo_id> delete "*"
|
| 20 |
+
|
| 21 |
+
# delete single file
|
| 22 |
+
huggingface-cli repo-files <repo_id> delete file.txt
|
| 23 |
+
|
| 24 |
+
# delete single folder
|
| 25 |
+
huggingface-cli repo-files <repo_id> delete folder/
|
| 26 |
+
|
| 27 |
+
# delete multiple
|
| 28 |
+
huggingface-cli repo-files <repo_id> delete file.txt folder/ file2.txt
|
| 29 |
+
|
| 30 |
+
# delete multiple patterns
|
| 31 |
+
huggingface-cli repo-files <repo_id> delete file.txt "*.json" "folder/*.parquet"
|
| 32 |
+
|
| 33 |
+
# delete from different revision / repo-type
|
| 34 |
+
huggingface-cli repo-files <repo_id> delete file.txt --revision=refs/pr/1 --repo-type=dataset
|
| 35 |
+
"""
|
| 36 |
+
|
| 37 |
+
from argparse import _SubParsersAction
|
| 38 |
+
from typing import List, Optional
|
| 39 |
+
|
| 40 |
+
from huggingface_hub import logging
|
| 41 |
+
from huggingface_hub.commands import BaseHuggingfaceCLICommand
|
| 42 |
+
from huggingface_hub.hf_api import HfApi
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
logger = logging.get_logger(__name__)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class DeleteFilesSubCommand:
|
| 49 |
+
def __init__(self, args) -> None:
|
| 50 |
+
self.args = args
|
| 51 |
+
self.repo_id: str = args.repo_id
|
| 52 |
+
self.repo_type: Optional[str] = args.repo_type
|
| 53 |
+
self.revision: Optional[str] = args.revision
|
| 54 |
+
self.api: HfApi = HfApi(token=args.token, library_name="huggingface-cli")
|
| 55 |
+
self.patterns: List[str] = args.patterns
|
| 56 |
+
self.commit_message: Optional[str] = args.commit_message
|
| 57 |
+
self.commit_description: Optional[str] = args.commit_description
|
| 58 |
+
self.create_pr: bool = args.create_pr
|
| 59 |
+
self.token: Optional[str] = args.token
|
| 60 |
+
|
| 61 |
+
def run(self) -> None:
|
| 62 |
+
logging.set_verbosity_info()
|
| 63 |
+
url = self.api.delete_files(
|
| 64 |
+
delete_patterns=self.patterns,
|
| 65 |
+
repo_id=self.repo_id,
|
| 66 |
+
repo_type=self.repo_type,
|
| 67 |
+
revision=self.revision,
|
| 68 |
+
commit_message=self.commit_message,
|
| 69 |
+
commit_description=self.commit_description,
|
| 70 |
+
create_pr=self.create_pr,
|
| 71 |
+
)
|
| 72 |
+
print(f"Files correctly deleted from repo. Commit: {url}.")
|
| 73 |
+
logging.set_verbosity_warning()
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class RepoFilesCommand(BaseHuggingfaceCLICommand):
|
| 77 |
+
@staticmethod
|
| 78 |
+
def register_subcommand(parser: _SubParsersAction):
|
| 79 |
+
repo_files_parser = parser.add_parser("repo-files", help="Manage files in a repo on the Hub")
|
| 80 |
+
repo_files_parser.add_argument(
|
| 81 |
+
"repo_id", type=str, help="The ID of the repo to manage (e.g. `username/repo-name`)."
|
| 82 |
+
)
|
| 83 |
+
repo_files_subparsers = repo_files_parser.add_subparsers(
|
| 84 |
+
help="Action to execute against the files.",
|
| 85 |
+
required=True,
|
| 86 |
+
)
|
| 87 |
+
delete_subparser = repo_files_subparsers.add_parser(
|
| 88 |
+
"delete",
|
| 89 |
+
help="Delete files from a repo on the Hub",
|
| 90 |
+
)
|
| 91 |
+
delete_subparser.set_defaults(func=lambda args: DeleteFilesSubCommand(args))
|
| 92 |
+
delete_subparser.add_argument(
|
| 93 |
+
"patterns",
|
| 94 |
+
nargs="+",
|
| 95 |
+
type=str,
|
| 96 |
+
help="Glob patterns to match files to delete.",
|
| 97 |
+
)
|
| 98 |
+
delete_subparser.add_argument(
|
| 99 |
+
"--repo-type",
|
| 100 |
+
choices=["model", "dataset", "space"],
|
| 101 |
+
default="model",
|
| 102 |
+
help="Type of the repo to upload to (e.g. `dataset`).",
|
| 103 |
+
)
|
| 104 |
+
delete_subparser.add_argument(
|
| 105 |
+
"--revision",
|
| 106 |
+
type=str,
|
| 107 |
+
help=(
|
| 108 |
+
"An optional Git revision to push to. It can be a branch name "
|
| 109 |
+
"or a PR reference. If revision does not"
|
| 110 |
+
" exist and `--create-pr` is not set, a branch will be automatically created."
|
| 111 |
+
),
|
| 112 |
+
)
|
| 113 |
+
delete_subparser.add_argument(
|
| 114 |
+
"--commit-message", type=str, help="The summary / title / first line of the generated commit."
|
| 115 |
+
)
|
| 116 |
+
delete_subparser.add_argument(
|
| 117 |
+
"--commit-description", type=str, help="The description of the generated commit."
|
| 118 |
+
)
|
| 119 |
+
delete_subparser.add_argument(
|
| 120 |
+
"--create-pr", action="store_true", help="Whether to create a new Pull Request for these changes."
|
| 121 |
+
)
|
| 122 |
+
repo_files_parser.add_argument(
|
| 123 |
+
"--token",
|
| 124 |
+
type=str,
|
| 125 |
+
help="A User Access Token generated from https://huggingface.co/settings/tokens",
|
| 126 |
+
)
|
| 127 |
+
|
| 128 |
+
repo_files_parser.set_defaults(func=RepoFilesCommand)
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/scan_cache.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# coding=utf-8
|
| 2 |
+
# Copyright 2022-present, the HuggingFace Inc. team.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
"""Contains command to scan the HF cache directory.
|
| 16 |
+
|
| 17 |
+
Usage:
|
| 18 |
+
huggingface-cli scan-cache
|
| 19 |
+
huggingface-cli scan-cache -v
|
| 20 |
+
huggingface-cli scan-cache -vvv
|
| 21 |
+
huggingface-cli scan-cache --dir ~/.cache/huggingface/hub
|
| 22 |
+
"""
|
| 23 |
+
|
| 24 |
+
import time
|
| 25 |
+
from argparse import Namespace, _SubParsersAction
|
| 26 |
+
from typing import Optional
|
| 27 |
+
|
| 28 |
+
from ..utils import CacheNotFound, HFCacheInfo, scan_cache_dir
|
| 29 |
+
from . import BaseHuggingfaceCLICommand
|
| 30 |
+
from ._cli_utils import ANSI, tabulate
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class ScanCacheCommand(BaseHuggingfaceCLICommand):
    """`huggingface-cli scan-cache`: scan the local HF cache and print a summary table."""

    @staticmethod
    def register_subcommand(parser: _SubParsersAction):
        """Attach the `scan-cache` subcommand and its two options to the CLI parser."""
        subparser = parser.add_parser("scan-cache", help="Scan cache directory.")

        subparser.add_argument(
            "--dir",
            type=str,
            default=None,
            help="cache directory to scan (optional). Default to the default HuggingFace cache.",
        )
        # `action="count"` makes verbosity grow with repeated flags (-v, -vv, -vvv).
        subparser.add_argument(
            "-v",
            "--verbose",
            action="count",
            default=0,
            help="show a more verbose output",
        )
        subparser.set_defaults(func=ScanCacheCommand)

    def __init__(self, args: Namespace) -> None:
        # Verbosity level: 0 by default, incremented once per `-v` flag.
        self.verbosity: int = args.verbose
        # Explicit cache directory to scan, or None for the default HF cache.
        self.cache_dir: Optional[str] = args.dir

    def run(self):
        """Scan the cache, print the table, then timing and warning summaries."""
        start = time.time()
        try:
            cache_info = scan_cache_dir(self.cache_dir)
        except CacheNotFound as err:
            print(f"Cache directory not found: {err.cache_dir}")
            return
        end = time.time()

        self._print_hf_cache_info_as_table(cache_info)

        print(
            f"\nDone in {round(end - start, 1)}s. Scanned {len(cache_info.repos)} repo(s)"
            f" for a total of {ANSI.red(cache_info.size_on_disk_str)}."
        )

        nb_warnings = len(cache_info.warnings)
        if nb_warnings > 0:
            message = f"Got {nb_warnings} warning(s) while scanning."
            if self.verbosity < 3:
                # Keep output terse unless the user asked for full details.
                print(ANSI.gray(message + " Use -vvv to print details."))
            else:
                print(ANSI.gray(message))
                for warning in cache_info.warnings:
                    print(ANSI.gray(warning))

    def _print_hf_cache_info_as_table(self, hf_cache_info: HFCacheInfo) -> None:
        """Render the scan result via `get_table` at the configured verbosity."""
        print(get_table(hf_cache_info, verbosity=self.verbosity))
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def get_table(hf_cache_info: HFCacheInfo, *, verbosity: int = 0) -> str:
    """Render a [`HFCacheInfo`] object as an ASCII table.

    With `verbosity=0`, one row per cached repo with columns
    "repo_id", "repo_type", "size_on_disk", "nb_files", "last_accessed", "last_modified", "refs", "local_path".

    With `verbosity>=1`, one row per (repo, revision) pair — a repo may therefore
    span several rows — with columns
    "repo_id", "repo_type", "revision", "size_on_disk", "nb_files", "last_modified", "refs", "local_path".

    Args:
        hf_cache_info ([`HFCacheInfo`]):
            The HFCacheInfo object to print.
        verbosity (`int`, *optional*):
            The verbosity level. Defaults to 0.

    Returns:
        `str`: The table as a string.
    """
    # Repos are listed in a deterministic order: sorted by local path.
    repos = sorted(hf_cache_info.repos, key=lambda repo: repo.repo_path)

    if verbosity == 0:
        rows = []
        for repo in repos:
            rows.append(
                [
                    repo.repo_id,
                    repo.repo_type,
                    "{:>12}".format(repo.size_on_disk_str),  # right-align sizes
                    repo.nb_files,
                    repo.last_accessed_str,
                    repo.last_modified_str,
                    ", ".join(sorted(repo.refs)),
                    str(repo.repo_path),
                ]
            )
        headers = [
            "REPO ID",
            "REPO TYPE",
            "SIZE ON DISK",
            "NB FILES",
            "LAST_ACCESSED",
            "LAST_MODIFIED",
            "REFS",
            "LOCAL PATH",
        ]
        return tabulate(rows=rows, headers=headers)

    # Verbose mode: one row per revision, revisions sorted by commit hash.
    rows = []
    for repo in repos:
        for revision in sorted(repo.revisions, key=lambda revision: revision.commit_hash):
            rows.append(
                [
                    repo.repo_id,
                    repo.repo_type,
                    revision.commit_hash,
                    "{:>12}".format(revision.size_on_disk_str),
                    revision.nb_files,
                    revision.last_modified_str,
                    ", ".join(sorted(revision.refs)),
                    str(revision.snapshot_path),
                ]
            )
    headers = [
        "REPO ID",
        "REPO TYPE",
        "REVISION",
        "SIZE ON DISK",
        "NB FILES",
        "LAST_MODIFIED",
        "REFS",
        "LOCAL PATH",
    ]
    return tabulate(rows=rows, headers=headers)
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/tag.py
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# coding=utf-8
|
| 2 |
+
# Copyright 2024-present, the HuggingFace Inc. team.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
|
| 16 |
+
"""Contains commands to perform tag management with the CLI.
|
| 17 |
+
|
| 18 |
+
Usage Examples:
|
| 19 |
+
- Create a tag:
|
| 20 |
+
$ huggingface-cli tag user/my-model 1.0 --message "First release"
|
| 21 |
+
$ huggingface-cli tag user/my-model 1.0 -m "First release" --revision develop
|
| 22 |
+
$ huggingface-cli tag user/my-dataset 1.0 -m "First release" --repo-type dataset
|
| 23 |
+
$ huggingface-cli tag user/my-space 1.0
|
| 24 |
+
- List all tags:
|
| 25 |
+
$ huggingface-cli tag -l user/my-model
|
| 26 |
+
$ huggingface-cli tag --list user/my-dataset --repo-type dataset
|
| 27 |
+
- Delete a tag:
|
| 28 |
+
$ huggingface-cli tag -d user/my-model 1.0
|
| 29 |
+
$ huggingface-cli tag --delete user/my-dataset 1.0 --repo-type dataset
|
| 30 |
+
$ huggingface-cli tag -d user/my-space 1.0 -y
|
| 31 |
+
"""
|
| 32 |
+
|
| 33 |
+
from argparse import Namespace, _SubParsersAction
|
| 34 |
+
|
| 35 |
+
from requests.exceptions import HTTPError
|
| 36 |
+
|
| 37 |
+
from huggingface_hub.commands import BaseHuggingfaceCLICommand
|
| 38 |
+
from huggingface_hub.constants import (
|
| 39 |
+
REPO_TYPES,
|
| 40 |
+
)
|
| 41 |
+
from huggingface_hub.hf_api import HfApi
|
| 42 |
+
|
| 43 |
+
from ..errors import HfHubHTTPError, RepositoryNotFoundError, RevisionNotFoundError
|
| 44 |
+
from ._cli_utils import ANSI
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class TagCommands(BaseHuggingfaceCLICommand):
    """`huggingface-cli tag`: create, list or delete tags on a repo on the Hub."""

    @staticmethod
    def register_subcommand(parser: _SubParsersAction):
        """Attach the `tag` subcommand and all of its flags to the CLI parser."""
        tag_parser = parser.add_parser("tag", help="(create, list, delete) tags for a repo in the hub")

        # Positional arguments. `tag` is optional because `--list` needs none.
        tag_parser.add_argument("repo_id", type=str, help="The ID of the repo to tag (e.g. `username/repo-name`).")
        tag_parser.add_argument("tag", nargs="?", type=str, help="The name of the tag for creation or deletion.")

        # Options shared by the three sub-commands.
        tag_parser.add_argument("-m", "--message", type=str, help="The description of the tag to create.")
        tag_parser.add_argument("--revision", type=str, help="The git revision to tag.")
        tag_parser.add_argument(
            "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens."
        )
        tag_parser.add_argument(
            "--repo-type",
            choices=["model", "dataset", "space"],
            default="model",
            help="Set the type of repository (model, dataset, or space).",
        )
        tag_parser.add_argument("-y", "--yes", action="store_true", help="Answer Yes to prompts automatically.")

        # Mode selectors: default mode (neither flag) is tag creation.
        tag_parser.add_argument("-l", "--list", action="store_true", help="List tags for a repository.")
        tag_parser.add_argument("-d", "--delete", action="store_true", help="Delete a tag for a repository.")

        tag_parser.set_defaults(func=handle_commands)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def handle_commands(args: Namespace):
    """Pick the tag sub-command object matching the parsed CLI flags.

    `--list` takes precedence over `--delete`; with neither flag set, a tag is created.
    """
    if args.list:
        return TagListCommand(args)
    if args.delete:
        return TagDeleteCommand(args)
    return TagCreateCommand(args)
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
class TagCommand:
    """Shared base for the tag sub-commands: parsed args, API client and repo coordinates."""

    def __init__(self, args: Namespace):
        self.args = args
        self.api = HfApi(token=args.token)
        self.repo_id = args.repo_id
        self.repo_type = args.repo_type
        # Fail fast on an unknown repo type before making any API call.
        if self.repo_type not in REPO_TYPES:
            print("Invalid repo --repo-type")
            exit(1)
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
class TagCreateCommand(TagCommand):
    """Create a tag on a repo, translating API errors into CLI-friendly messages."""

    def run(self):
        tag = self.args.tag
        print(f"You are about to create tag {ANSI.bold(tag)} on {self.repo_type} {ANSI.bold(self.repo_id)}")

        try:
            self.api.create_tag(
                repo_id=self.repo_id,
                tag=tag,
                tag_message=self.args.message,
                revision=self.args.revision,
                repo_type=self.repo_type,
            )
        except RepositoryNotFoundError:
            print(f"{self.repo_type.capitalize()} {ANSI.bold(self.repo_id)} not found.")
            exit(1)
        except RevisionNotFoundError:
            print(f"Revision {ANSI.bold(self.args.revision)} not found.")
            exit(1)
        except HfHubHTTPError as error:
            # 409 Conflict <=> the tag already exists on this repo.
            if error.response.status_code == 409:
                print(f"Tag {ANSI.bold(tag)} already exists on {ANSI.bold(self.repo_id)}")
                exit(1)
            raise error

        print(f"Tag {ANSI.bold(tag)} created on {ANSI.bold(self.repo_id)}")
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
class TagListCommand(TagCommand):
    """List every tag of a repo, or report that there is none."""

    def run(self):
        try:
            refs = self.api.list_repo_refs(
                repo_id=self.repo_id,
                repo_type=self.repo_type,
            )
        except RepositoryNotFoundError:
            print(f"{self.repo_type.capitalize()} {ANSI.bold(self.repo_id)} not found.")
            exit(1)
        except HTTPError as error:
            # Surface both the exception and the raw server response.
            print(error)
            print(ANSI.red(error.response.text))
            exit(1)

        if not refs.tags:
            print("No tags found")
            exit(0)

        print(f"Tags for {self.repo_type} {ANSI.bold(self.repo_id)}:")
        for tag in refs.tags:
            print(tag.name)
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
class TagDeleteCommand(TagCommand):
    """Delete a tag from a repo, asking for confirmation unless `--yes` was passed."""

    def run(self):
        tag = self.args.tag
        print(f"You are about to delete tag {ANSI.bold(tag)} on {self.repo_type} {ANSI.bold(self.repo_id)}")

        if not self.args.yes:
            # Empty answer defaults to "yes".
            answer = input("Proceed? [Y/n] ").lower()
            if answer not in {"", "y", "yes"}:
                print("Abort")
                exit()

        try:
            self.api.delete_tag(repo_id=self.repo_id, tag=tag, repo_type=self.repo_type)
        except RepositoryNotFoundError:
            print(f"{self.repo_type.capitalize()} {ANSI.bold(self.repo_id)} not found.")
            exit(1)
        except RevisionNotFoundError:
            print(f"Tag {ANSI.bold(tag)} not found on {ANSI.bold(self.repo_id)}")
            exit(1)

        print(f"Tag {ANSI.bold(tag)} deleted on {ANSI.bold(self.repo_id)}")
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/upload.py
ADDED
|
@@ -0,0 +1,314 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# coding=utf-8
|
| 2 |
+
# Copyright 2023-present, the HuggingFace Inc. team.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
"""Contains command to upload a repo or file with the CLI.
|
| 16 |
+
|
| 17 |
+
Usage:
|
| 18 |
+
# Upload file (implicit)
|
| 19 |
+
huggingface-cli upload my-cool-model ./my-cool-model.safetensors
|
| 20 |
+
|
| 21 |
+
# Upload file (explicit)
|
| 22 |
+
huggingface-cli upload my-cool-model ./my-cool-model.safetensors model.safetensors
|
| 23 |
+
|
| 24 |
+
# Upload directory (implicit). If `my-cool-model/` is a directory it will be uploaded, otherwise an exception is raised.
|
| 25 |
+
huggingface-cli upload my-cool-model
|
| 26 |
+
|
| 27 |
+
# Upload directory (explicit)
|
| 28 |
+
huggingface-cli upload my-cool-model ./models/my-cool-model .
|
| 29 |
+
|
| 30 |
+
# Upload filtered directory (example: tensorboard logs except for the last run)
|
| 31 |
+
huggingface-cli upload my-cool-model ./model/training /logs --include "*.tfevents.*" --exclude "*20230905*"
|
| 32 |
+
|
| 33 |
+
# Upload with wildcard
|
| 34 |
+
huggingface-cli upload my-cool-model "./model/training/*.safetensors"
|
| 35 |
+
|
| 36 |
+
# Upload private dataset
|
| 37 |
+
huggingface-cli upload Wauplin/my-cool-dataset ./data . --repo-type=dataset --private
|
| 38 |
+
|
| 39 |
+
# Upload with token
|
| 40 |
+
huggingface-cli upload Wauplin/my-cool-model --token=hf_****
|
| 41 |
+
|
| 42 |
+
# Sync local Space with Hub (upload new files, delete removed files)
|
| 43 |
+
huggingface-cli upload Wauplin/space-example --repo-type=space --exclude="/logs/*" --delete="*" --commit-message="Sync local Space with Hub"
|
| 44 |
+
|
| 45 |
+
# Schedule commits every 30 minutes
|
| 46 |
+
huggingface-cli upload Wauplin/my-cool-model --every=30
|
| 47 |
+
"""
|
| 48 |
+
|
| 49 |
+
import os
|
| 50 |
+
import time
|
| 51 |
+
import warnings
|
| 52 |
+
from argparse import Namespace, _SubParsersAction
|
| 53 |
+
from typing import List, Optional
|
| 54 |
+
|
| 55 |
+
from huggingface_hub import logging
|
| 56 |
+
from huggingface_hub._commit_scheduler import CommitScheduler
|
| 57 |
+
from huggingface_hub.commands import BaseHuggingfaceCLICommand
|
| 58 |
+
from huggingface_hub.constants import HF_HUB_ENABLE_HF_TRANSFER
|
| 59 |
+
from huggingface_hub.errors import RevisionNotFoundError
|
| 60 |
+
from huggingface_hub.hf_api import HfApi
|
| 61 |
+
from huggingface_hub.utils import disable_progress_bars, enable_progress_bars
|
| 62 |
+
from huggingface_hub.utils._runtime import is_xet_available
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
logger = logging.get_logger(__name__)
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
class UploadCommand(BaseHuggingfaceCLICommand):
    """`huggingface-cli upload`: upload a file or folder to a repo on the Hub.

    Supports one-shot uploads (file or folder, with include/exclude/delete glob
    patterns) as well as scheduled background commits via `--every`.
    """

    @staticmethod
    def register_subcommand(parser: _SubParsersAction):
        """Attach the `upload` subcommand and all of its options to the CLI parser."""
        upload_parser = parser.add_parser("upload", help="Upload a file or a folder to a repo on the Hub")
        upload_parser.add_argument(
            "repo_id", type=str, help="The ID of the repo to upload to (e.g. `username/repo-name`)."
        )
        # Both positionals are optional: see `__init__` for the implicit-resolution rules.
        upload_parser.add_argument(
            "local_path",
            nargs="?",
            help="Local path to the file or folder to upload. Wildcard patterns are supported. Defaults to current directory.",
        )
        upload_parser.add_argument(
            "path_in_repo",
            nargs="?",
            help="Path of the file or folder in the repo. Defaults to the relative path of the file or folder.",
        )
        upload_parser.add_argument(
            "--repo-type",
            choices=["model", "dataset", "space"],
            default="model",
            help="Type of the repo to upload to (e.g. `dataset`).",
        )
        upload_parser.add_argument(
            "--revision",
            type=str,
            help=(
                "An optional Git revision to push to. It can be a branch name or a PR reference. If revision does not"
                " exist and `--create-pr` is not set, a branch will be automatically created."
            ),
        )
        upload_parser.add_argument(
            "--private",
            action="store_true",
            help=(
                "Whether to create a private repo if repo doesn't exist on the Hub. Ignored if the repo already"
                " exists."
            ),
        )
        upload_parser.add_argument("--include", nargs="*", type=str, help="Glob patterns to match files to upload.")
        upload_parser.add_argument(
            "--exclude", nargs="*", type=str, help="Glob patterns to exclude from files to upload."
        )
        upload_parser.add_argument(
            "--delete",
            nargs="*",
            type=str,
            help="Glob patterns for file to be deleted from the repo while committing.",
        )
        upload_parser.add_argument(
            "--commit-message", type=str, help="The summary / title / first line of the generated commit."
        )
        upload_parser.add_argument("--commit-description", type=str, help="The description of the generated commit.")
        upload_parser.add_argument(
            "--create-pr", action="store_true", help="Whether to upload content as a new Pull Request."
        )
        upload_parser.add_argument(
            "--every",
            type=float,
            help="If set, a background job is scheduled to create commits every `every` minutes.",
        )
        upload_parser.add_argument(
            "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
        )
        upload_parser.add_argument(
            "--quiet",
            action="store_true",
            help="If True, progress bars are disabled and only the path to the uploaded files is printed.",
        )
        upload_parser.set_defaults(func=UploadCommand)

    def __init__(self, args: Namespace) -> None:
        """Store parsed args, validate `--every`, and resolve the local/remote paths.

        Raises:
            ValueError: if `--every` is not strictly positive, if `--include` or an
                explicit `path_in_repo` is combined with a wildcard `local_path`, or
                if no local file/folder can be inferred from `repo_id` alone.
        """
        self.repo_id: str = args.repo_id
        self.repo_type: Optional[str] = args.repo_type
        self.revision: Optional[str] = args.revision
        self.private: bool = args.private

        self.include: Optional[List[str]] = args.include
        self.exclude: Optional[List[str]] = args.exclude
        self.delete: Optional[List[str]] = args.delete

        self.commit_message: Optional[str] = args.commit_message
        self.commit_description: Optional[str] = args.commit_description
        self.create_pr: bool = args.create_pr
        self.api: HfApi = HfApi(token=args.token, library_name="huggingface-cli")
        self.quiet: bool = args.quiet  # disable warnings and progress bars

        # Check `--every` is valid
        if args.every is not None and args.every <= 0:
            raise ValueError(f"`every` must be a positive value (got '{args.every}')")
        self.every: Optional[float] = args.every

        # Resolve `local_path` and `path_in_repo`
        repo_name: str = args.repo_id.split("/")[-1]  # e.g. "Wauplin/my-cool-model" => "my-cool-model"
        self.local_path: str
        self.path_in_repo: str

        # Wildcard case: treat `local_path` itself as the include pattern rooted at cwd.
        if args.local_path is not None and any(c in args.local_path for c in ["*", "?", "["]):
            if args.include is not None:
                raise ValueError("Cannot set `--include` when passing a `local_path` containing a wildcard.")
            if args.path_in_repo is not None and args.path_in_repo != ".":
                raise ValueError("Cannot set `path_in_repo` when passing a `local_path` containing a wildcard.")
            self.local_path = "."
            # NOTE(review): a single pattern string is stored here (not a list) —
            # presumably accepted downstream by `upload_folder(allow_patterns=...)`; confirm.
            self.include = args.local_path
            self.path_in_repo = "."
        elif args.local_path is None and os.path.isfile(repo_name):
            # Implicit case 1: user provided only a repo_id which happen to be a local file as well => upload it with same name
            self.local_path = repo_name
            self.path_in_repo = repo_name
        elif args.local_path is None and os.path.isdir(repo_name):
            # Implicit case 2: user provided only a repo_id which happen to be a local folder as well => upload it at root
            self.local_path = repo_name
            self.path_in_repo = "."
        elif args.local_path is None:
            # Implicit case 3: user provided only a repo_id that does not match a local file or folder
            # => the user must explicitly provide a local_path => raise exception
            raise ValueError(f"'{repo_name}' is not a local file or folder. Please set `local_path` explicitly.")
        elif args.path_in_repo is None and os.path.isfile(args.local_path):
            # Explicit local path to file, no path in repo => upload it at root with same name
            self.local_path = args.local_path
            self.path_in_repo = os.path.basename(args.local_path)
        elif args.path_in_repo is None:
            # Explicit local path to folder, no path in repo => upload at root
            self.local_path = args.local_path
            self.path_in_repo = "."
        else:
            # Finally, if both paths are explicit
            self.local_path = args.local_path
            self.path_in_repo = args.path_in_repo

    def run(self) -> None:
        """Run the upload, controlling log verbosity / progress bars per `--quiet`."""
        if self.quiet:
            disable_progress_bars()
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                print(self._upload())
            enable_progress_bars()
        else:
            logging.set_verbosity_info()
            print(self._upload())
            logging.set_verbosity_warning()

    def _upload(self) -> str:
        """Perform the actual upload and return a printable result.

        Returns either the URL returned by `upload_file`/`upload_folder`, or a
        status message when scheduled commits are interrupted.
        """
        # Single-file uploads ignore the pattern options; warn the user about it.
        if os.path.isfile(self.local_path):
            if self.include is not None and len(self.include) > 0:
                warnings.warn("Ignoring `--include` since a single file is uploaded.")
            if self.exclude is not None and len(self.exclude) > 0:
                warnings.warn("Ignoring `--exclude` since a single file is uploaded.")
            if self.delete is not None and len(self.delete) > 0:
                warnings.warn("Ignoring `--delete` since a single file is uploaded.")

        if not is_xet_available() and not HF_HUB_ENABLE_HF_TRANSFER:
            logger.info(
                "Consider using `hf_transfer` for faster uploads. This solution comes with some limitations. See"
                " https://huggingface.co/docs/huggingface_hub/hf_transfer for more details."
            )

        # Schedule commits if `every` is set
        if self.every is not None:
            if os.path.isfile(self.local_path):
                # If file => watch entire folder + use allow_patterns
                folder_path = os.path.dirname(self.local_path)
                path_in_repo = (
                    self.path_in_repo[: -len(self.local_path)]  # remove filename from path_in_repo
                    if self.path_in_repo.endswith(self.local_path)
                    else self.path_in_repo
                )
                allow_patterns = [self.local_path]
                ignore_patterns = []
            else:
                folder_path = self.local_path
                path_in_repo = self.path_in_repo
                allow_patterns = self.include or []
                ignore_patterns = self.exclude or []
                if self.delete is not None and len(self.delete) > 0:
                    warnings.warn("Ignoring `--delete` when uploading with scheduled commits.")

            scheduler = CommitScheduler(
                folder_path=folder_path,
                repo_id=self.repo_id,
                repo_type=self.repo_type,
                revision=self.revision,
                allow_patterns=allow_patterns,
                ignore_patterns=ignore_patterns,
                path_in_repo=path_in_repo,
                private=self.private,
                every=self.every,
                hf_api=self.api,
            )
            print(f"Scheduling commits every {self.every} minutes to {scheduler.repo_id}.")
            try:  # Block main thread until KeyboardInterrupt
                while True:
                    time.sleep(100)
            except KeyboardInterrupt:
                scheduler.stop()
                return "Stopped scheduled commits."

        # Otherwise, create repo and proceed with the upload
        if not os.path.isfile(self.local_path) and not os.path.isdir(self.local_path):
            raise FileNotFoundError(f"No such file or directory: '{self.local_path}'.")
        repo_id = self.api.create_repo(
            repo_id=self.repo_id,
            repo_type=self.repo_type,
            exist_ok=True,
            private=self.private,
            space_sdk="gradio" if self.repo_type == "space" else None,
            # ^ We don't want it to fail when uploading to a Space => let's set Gradio by default.
            # ^ I'd rather not add CLI args to set it explicitly as we already have `huggingface-cli repo create` for that.
        ).repo_id

        # Check if branch already exists and if not, create it
        if self.revision is not None and not self.create_pr:
            try:
                self.api.repo_info(repo_id=repo_id, repo_type=self.repo_type, revision=self.revision)
            except RevisionNotFoundError:
                logger.info(f"Branch '{self.revision}' not found. Creating it...")
                self.api.create_branch(repo_id=repo_id, repo_type=self.repo_type, branch=self.revision, exist_ok=True)
                # ^ `exist_ok=True` to avoid race concurrency issues

        # File-based upload
        if os.path.isfile(self.local_path):
            return self.api.upload_file(
                path_or_fileobj=self.local_path,
                path_in_repo=self.path_in_repo,
                repo_id=repo_id,
                repo_type=self.repo_type,
                revision=self.revision,
                commit_message=self.commit_message,
                commit_description=self.commit_description,
                create_pr=self.create_pr,
            )

        # Folder-based upload
        else:
            return self.api.upload_folder(
                folder_path=self.local_path,
                path_in_repo=self.path_in_repo,
                repo_id=repo_id,
                repo_type=self.repo_type,
                revision=self.revision,
                commit_message=self.commit_message,
                commit_description=self.commit_description,
                create_pr=self.create_pr,
                allow_patterns=self.include,
                ignore_patterns=self.exclude,
                delete_patterns=self.delete,
            )
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/upload_large_folder.py
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# coding=utf-8
|
| 2 |
+
# Copyright 2023-present, the HuggingFace Inc. team.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
"""Contains command to upload a large folder with the CLI."""
|
| 16 |
+
|
| 17 |
+
import os
|
| 18 |
+
from argparse import Namespace, _SubParsersAction
|
| 19 |
+
from typing import List, Optional
|
| 20 |
+
|
| 21 |
+
from huggingface_hub import logging
|
| 22 |
+
from huggingface_hub.commands import BaseHuggingfaceCLICommand
|
| 23 |
+
from huggingface_hub.hf_api import HfApi
|
| 24 |
+
from huggingface_hub.utils import disable_progress_bars
|
| 25 |
+
|
| 26 |
+
from ._cli_utils import ANSI
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
logger = logging.get_logger(__name__)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class UploadLargeFolderCommand(BaseHuggingfaceCLICommand):
    """CLI command `huggingface-cli upload-large-folder`: resumable, multi-worker upload of a local folder."""

    @staticmethod
    def register_subcommand(parser: _SubParsersAction):
        """Register the `upload-large-folder` subparser and bind it to this command class."""
        subparser = parser.add_parser("upload-large-folder", help="Upload a large folder to a repo on the Hub")
        subparser.add_argument(
            "repo_id", type=str, help="The ID of the repo to upload to (e.g. `username/repo-name`)."
        )
        subparser.add_argument("local_path", type=str, help="Local path to the file or folder to upload.")
        subparser.add_argument(
            "--repo-type",
            choices=["model", "dataset", "space"],
            help="Type of the repo to upload to (e.g. `dataset`).",
        )
        subparser.add_argument(
            "--revision",
            type=str,
            help=("An optional Git revision to push to. It can be a branch name or a PR reference."),
        )
        subparser.add_argument(
            "--private",
            action="store_true",
            help=(
                "Whether to create a private repo if repo doesn't exist on the Hub. Ignored if the repo already exists."
            ),
        )
        subparser.add_argument("--include", nargs="*", type=str, help="Glob patterns to match files to upload.")
        subparser.add_argument("--exclude", nargs="*", type=str, help="Glob patterns to exclude from files to upload.")
        subparser.add_argument(
            "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens"
        )
        subparser.add_argument(
            "--num-workers", type=int, help="Number of workers to use to hash, upload and commit files."
        )
        subparser.add_argument("--no-report", action="store_true", help="Whether to disable regular status report.")
        subparser.add_argument("--no-bars", action="store_true", help="Whether to disable progress bars.")
        # The class itself is the factory: argparse will call `UploadLargeFolderCommand(args)`.
        subparser.set_defaults(func=UploadLargeFolderCommand)

    def __init__(self, args: Namespace) -> None:
        """Copy parsed CLI arguments onto the instance and validate the local path."""
        self.repo_id: str = args.repo_id
        self.local_path: str = args.local_path
        self.repo_type: str = args.repo_type
        self.revision: Optional[str] = args.revision
        self.private: bool = args.private

        self.include: Optional[List[str]] = args.include
        self.exclude: Optional[List[str]] = args.exclude

        self.api: HfApi = HfApi(token=args.token, library_name="huggingface-cli")

        self.num_workers: Optional[int] = args.num_workers
        self.no_report: bool = args.no_report
        self.no_bars: bool = args.no_bars

        # Unlike `huggingface-cli upload`, single files are not supported here.
        if not os.path.isdir(self.local_path):
            raise ValueError("Large upload is only supported for folders.")

    def run(self) -> None:
        """Print usage caveats, then delegate the actual upload to `HfApi.upload_large_folder`."""
        logging.set_verbosity_info()

        # Long-form warning: the large-folder upload is resumable and stores state
        # under `<local_path>/.cache/huggingface`, so users must not tamper with it.
        print(
            ANSI.yellow(
                "You are about to upload a large folder to the Hub using `huggingface-cli upload-large-folder`. "
                "This is a new feature so feedback is very welcome!\n"
                "\n"
                "A few things to keep in mind:\n"
                " - Repository limits still apply: https://huggingface.co/docs/hub/repositories-recommendations\n"
                " - Do not start several processes in parallel.\n"
                " - You can interrupt and resume the process at any time. "
                "The script will pick up where it left off except for partially uploaded files that would have to be entirely reuploaded.\n"
                " - Do not upload the same folder to several repositories. If you need to do so, you must delete the `./.cache/huggingface/` folder first.\n"
                "\n"
                f"Some temporary metadata will be stored under `{self.local_path}/.cache/huggingface`.\n"
                " - You must not modify those files manually.\n"
                " - You must not delete the `./.cache/huggingface/` folder while a process is running.\n"
                " - You can delete the `./.cache/huggingface/` folder to reinitialize the upload state when process is not running. Files will have to be hashed and preuploaded again, except for already committed files.\n"
                "\n"
                "If the process output is too verbose, you can disable the progress bars with `--no-bars`. "
                "You can also entirely disable the status report with `--no-report`.\n"
                "\n"
                "For more details, run `huggingface-cli upload-large-folder --help` or check the documentation at "
                "https://huggingface.co/docs/huggingface_hub/guides/upload#upload-a-large-folder."
            )
        )

        if self.no_bars:
            disable_progress_bars()

        self.api.upload_large_folder(
            repo_id=self.repo_id,
            folder_path=self.local_path,
            repo_type=self.repo_type,
            revision=self.revision,
            private=self.private,
            allow_patterns=self.include,
            ignore_patterns=self.exclude,
            num_workers=self.num_workers,
            print_report=not self.no_report,
        )
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/user.py
ADDED
|
@@ -0,0 +1,198 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 The HuggingFace Team. All rights reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Contains commands to authenticate to the Hugging Face Hub and interact with your repositories.
|
| 15 |
+
|
| 16 |
+
Usage:
|
| 17 |
+
# login and save token locally.
|
| 18 |
+
huggingface-cli login --token=hf_*** --add-to-git-credential
|
| 19 |
+
|
| 20 |
+
# switch between tokens
|
| 21 |
+
huggingface-cli auth switch
|
| 22 |
+
|
| 23 |
+
# list all tokens
|
| 24 |
+
huggingface-cli auth list
|
| 25 |
+
|
| 26 |
+
# logout from a specific token, if no token-name is provided, all tokens will be deleted from your machine.
|
| 27 |
+
huggingface-cli logout --token-name=your_token_name
|
| 28 |
+
|
| 29 |
+
# find out which huggingface.co account you are logged in as
|
| 30 |
+
huggingface-cli whoami
|
| 31 |
+
"""
|
| 32 |
+
|
| 33 |
+
from argparse import _SubParsersAction
|
| 34 |
+
from typing import List, Optional
|
| 35 |
+
|
| 36 |
+
from requests.exceptions import HTTPError
|
| 37 |
+
|
| 38 |
+
from huggingface_hub.commands import BaseHuggingfaceCLICommand
|
| 39 |
+
from huggingface_hub.constants import ENDPOINT
|
| 40 |
+
from huggingface_hub.hf_api import HfApi
|
| 41 |
+
|
| 42 |
+
from .._login import auth_list, auth_switch, login, logout
|
| 43 |
+
from ..utils import get_stored_tokens, get_token, logging
|
| 44 |
+
from ._cli_utils import ANSI
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
logger = logging.get_logger(__name__)
|
| 48 |
+
|
| 49 |
+
try:
|
| 50 |
+
from InquirerPy import inquirer
|
| 51 |
+
from InquirerPy.base.control import Choice
|
| 52 |
+
|
| 53 |
+
_inquirer_py_available = True
|
| 54 |
+
except ImportError:
|
| 55 |
+
_inquirer_py_available = False
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
class UserCommands(BaseHuggingfaceCLICommand):
    """Registers the authentication subcommands: `login`, `whoami`, `logout` and the `auth` group."""

    @staticmethod
    def register_subcommand(parser: _SubParsersAction):
        """Attach all auth-related subparsers; each binds `func` to a lambda building its command object."""
        # `login`: store a User Access Token locally (optionally also in the git credential helper).
        login_parser = parser.add_parser("login", help="Log in using a token from huggingface.co/settings/tokens")
        login_parser.add_argument(
            "--token",
            type=str,
            help="Token generated from https://huggingface.co/settings/tokens",
        )
        login_parser.add_argument(
            "--add-to-git-credential",
            action="store_true",
            help="Optional: Save token to git credential helper.",
        )
        login_parser.set_defaults(func=lambda args: LoginCommand(args))
        whoami_parser = parser.add_parser("whoami", help="Find out which huggingface.co account you are logged in as.")
        whoami_parser.set_defaults(func=lambda args: WhoamiCommand(args))

        logout_parser = parser.add_parser("logout", help="Log out")
        logout_parser.add_argument(
            "--token-name",
            type=str,
            help="Optional: Name of the access token to log out from.",
        )
        logout_parser.set_defaults(func=lambda args: LogoutCommand(args))

        # `auth` group: commands to manage several stored tokens (`switch`, `list`).
        auth_parser = parser.add_parser("auth", help="Other authentication related commands")
        auth_subparsers = auth_parser.add_subparsers(help="Authentication subcommands")
        auth_switch_parser = auth_subparsers.add_parser("switch", help="Switch between access tokens")
        auth_switch_parser.add_argument(
            "--token-name",
            type=str,
            help="Optional: Name of the access token to switch to.",
        )
        auth_switch_parser.add_argument(
            "--add-to-git-credential",
            action="store_true",
            help="Optional: Save token to git credential helper.",
        )
        auth_switch_parser.set_defaults(func=lambda args: AuthSwitchCommand(args))
        auth_list_parser = auth_subparsers.add_parser("list", help="List all stored access tokens")
        auth_list_parser.set_defaults(func=lambda args: AuthListCommand(args))
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
class BaseUserCommand:
    """Common base for auth commands: keeps the parsed args and a default `HfApi` client."""

    def __init__(self, args):
        self.args = args
        self._api = HfApi()
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
class LoginCommand(BaseUserCommand):
    """Implements `huggingface-cli login` by delegating to `huggingface_hub._login.login`."""

    def run(self):
        logging.set_verbosity_info()
        login(
            token=self.args.token,
            add_to_git_credential=self.args.add_to_git_credential,
        )
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
class LogoutCommand(BaseUserCommand):
    """Implements `huggingface-cli logout`; with no `--token-name`, all tokens are removed."""

    def run(self):
        logging.set_verbosity_info()
        logout(token_name=self.args.token_name)
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
class AuthSwitchCommand(BaseUserCommand):
    """Implements `huggingface-cli auth switch`: select one of the locally stored tokens and activate it."""

    def run(self):
        logging.set_verbosity_info()
        token_name = self.args.token_name
        if token_name is None:
            # No `--token-name` given: prompt the user interactively.
            token_name = self._select_token_name()

        if token_name is None:
            # Still nothing selected (no stored tokens, or user quit): abort silently.
            print("No token name provided. Aborting.")
            exit()
        auth_switch(token_name, add_to_git_credential=self.args.add_to_git_credential)

    def _select_token_name(self) -> Optional[str]:
        """Prompt the user to pick a stored token name; returns None if none exist or the user quits."""
        token_names = list(get_stored_tokens().keys())

        if not token_names:
            logger.error("No stored tokens found. Please login first.")
            return None

        if _inquirer_py_available:
            return self._select_token_name_tui(token_names)
        # if inquirer is not available, use a simpler terminal UI
        print("Available stored tokens:")
        for i, token_name in enumerate(token_names, 1):
            print(f"{i}. {token_name}")
        # Loop until the user enters a valid 1-based index or 'q'.
        while True:
            try:
                choice = input("Enter the number of the token to switch to (or 'q' to quit): ")
                if choice.lower() == "q":
                    return None
                index = int(choice) - 1
                if 0 <= index < len(token_names):
                    return token_names[index]
                else:
                    print("Invalid selection. Please try again.")
            except ValueError:
                print("Invalid input. Please enter a number or 'q' to quit.")

    def _select_token_name_tui(self, token_names: List[str]) -> Optional[str]:
        """InquirerPy-based picker; returns None if the user cancels with Ctrl+C."""
        choices = [Choice(token_name, name=token_name) for token_name in token_names]
        try:
            return inquirer.select(
                message="Select a token to switch to:",
                choices=choices,
                default=None,
            ).execute()
        except KeyboardInterrupt:
            logger.info("Token selection cancelled.")
            return None
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
class AuthListCommand(BaseUserCommand):
    """Implements `huggingface-cli auth list`: print all locally stored access tokens."""

    def run(self):
        logging.set_verbosity_info()
        auth_list()
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
class WhoamiCommand(BaseUserCommand):
    """Implements `huggingface-cli whoami`: print the logged-in username and its orgs."""

    def run(self):
        token = get_token()
        if token is None:
            print("Not logged in")
            exit()
        try:
            info = self._api.whoami(token)
            print(info["name"])
            orgs = [org["name"] for org in info["orgs"]]
            if orgs:
                print(ANSI.bold("orgs: "), ",".join(orgs))

            # Make it explicit when the user is targeting a non-default Hub endpoint.
            if ENDPOINT != "https://huggingface.co":
                print(f"Authenticated through private endpoint: {ENDPOINT}")
        except HTTPError as e:
            # Surface the server-side error body (e.g. invalid token) and exit non-zero.
            print(e)
            print(ANSI.red(e.response.text))
            exit(1)
|
.venv/lib/python3.13/site-packages/huggingface_hub/commands/version.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2022 The HuggingFace Team. All rights reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Contains command to print information about the version.
|
| 15 |
+
|
| 16 |
+
Usage:
|
| 17 |
+
huggingface-cli version
|
| 18 |
+
"""
|
| 19 |
+
|
| 20 |
+
from argparse import _SubParsersAction
|
| 21 |
+
|
| 22 |
+
from huggingface_hub import __version__
|
| 23 |
+
|
| 24 |
+
from . import BaseHuggingfaceCLICommand
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class VersionCommand(BaseHuggingfaceCLICommand):
    """CLI command that reports the installed `huggingface_hub` version."""

    @staticmethod
    def register_subcommand(parser: _SubParsersAction):
        """Attach the `version` subparser; argparse instantiates this class via `func`."""
        version_parser = parser.add_parser("version", help="Print information about the huggingface-cli version.")
        version_parser.set_defaults(func=VersionCommand)

    def __init__(self, args):
        self.args = args

    def run(self) -> None:
        """Print the version string and return."""
        print("huggingface_hub version: " + __version__)
|
.venv/lib/python3.13/site-packages/huggingface_hub/inference/_client.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
.venv/lib/python3.13/site-packages/huggingface_hub/inference/_common.py
ADDED
|
@@ -0,0 +1,427 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# coding=utf-8
|
| 2 |
+
# Copyright 2023-present, the HuggingFace Inc. team.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
"""Contains utilities used by both the sync and async inference clients."""
|
| 16 |
+
|
| 17 |
+
import base64
|
| 18 |
+
import io
|
| 19 |
+
import json
|
| 20 |
+
import logging
|
| 21 |
+
from contextlib import contextmanager
|
| 22 |
+
from dataclasses import dataclass
|
| 23 |
+
from pathlib import Path
|
| 24 |
+
from typing import (
|
| 25 |
+
TYPE_CHECKING,
|
| 26 |
+
Any,
|
| 27 |
+
AsyncIterable,
|
| 28 |
+
BinaryIO,
|
| 29 |
+
ContextManager,
|
| 30 |
+
Dict,
|
| 31 |
+
Generator,
|
| 32 |
+
Iterable,
|
| 33 |
+
List,
|
| 34 |
+
Literal,
|
| 35 |
+
NoReturn,
|
| 36 |
+
Optional,
|
| 37 |
+
Union,
|
| 38 |
+
overload,
|
| 39 |
+
)
|
| 40 |
+
|
| 41 |
+
from requests import HTTPError
|
| 42 |
+
|
| 43 |
+
from huggingface_hub.errors import (
|
| 44 |
+
GenerationError,
|
| 45 |
+
IncompleteGenerationError,
|
| 46 |
+
OverloadedError,
|
| 47 |
+
TextGenerationError,
|
| 48 |
+
UnknownError,
|
| 49 |
+
ValidationError,
|
| 50 |
+
)
|
| 51 |
+
|
| 52 |
+
from ..utils import get_session, is_aiohttp_available, is_numpy_available, is_pillow_available
|
| 53 |
+
from ._generated.types import ChatCompletionStreamOutput, TextGenerationStreamOutput
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
if TYPE_CHECKING:
|
| 57 |
+
from aiohttp import ClientResponse, ClientSession
|
| 58 |
+
from PIL.Image import Image
|
| 59 |
+
|
| 60 |
+
# TYPES
|
| 61 |
+
UrlT = str
|
| 62 |
+
PathT = Union[str, Path]
|
| 63 |
+
BinaryT = Union[bytes, BinaryIO]
|
| 64 |
+
ContentT = Union[BinaryT, PathT, UrlT, "Image"]
|
| 65 |
+
|
| 66 |
+
# Use to set a Accept: image/png header
|
| 67 |
+
TASKS_EXPECTING_IMAGES = {"text-to-image", "image-to-image"}
|
| 68 |
+
|
| 69 |
+
logger = logging.getLogger(__name__)
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
@dataclass
class RequestParameters:
    """Fully-resolved parameters for one inference HTTP request (shared by sync and async clients)."""

    url: str  # resolved endpoint URL to send the request to
    task: str  # inference task name (e.g. "text-to-image")
    model: Optional[str]  # model id, if resolved; may be None for URL-based endpoints
    json: Optional[Union[str, Dict, List]]  # JSON body, mutually exclusive with `data` in practice
    data: Optional[ContentT]  # raw binary body (bytes, file-like, path, URL or PIL image)
    headers: Dict[str, Any]  # HTTP headers to send (auth, accept, ...)
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
# Add dataclass for ModelStatus. We use this dataclass in get_model_status function.
|
| 83 |
+
# Add dataclass for ModelStatus. We use this dataclass in get_model_status function.
@dataclass
class ModelStatus:
    """
    This Dataclass represents the model status in the HF Inference API.

    Args:
        loaded (`bool`):
            If the model is currently loaded into HF's Inference API. Models
            are loaded on-demand, leading to the user's first request taking longer.
            If a model is loaded, you can be assured that it is in a healthy state.
        state (`str`):
            The current state of the model. This can be 'Loaded', 'Loadable', 'TooBig'.
            If a model's state is 'Loadable', it's not too big and has a supported
            backend. Loadable models are automatically loaded when the user first
            requests inference on the endpoint. This means it is transparent for the
            user to load a model, except that the first call takes longer to complete.
        compute_type (`Dict`):
            Information about the compute resource the model is using or will use, such as 'gpu' type and number of
            replicas.
        framework (`str`):
            The name of the framework that the model was built with, such as 'transformers'
            or 'text-generation-inference'.
    """

    loaded: bool
    state: str
    compute_type: Dict
    framework: str
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
## IMPORT UTILS
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def _import_aiohttp():
    """Return the `aiohttp` module, raising a friendly `ImportError` when it is not installed."""
    if not is_aiohttp_available():
        raise ImportError("Please install aiohttp to use `AsyncInferenceClient` (`pip install aiohttp`).")

    import aiohttp

    return aiohttp
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def _import_numpy():
    """Return the `numpy` module, making sure it is installed on the machine.

    Raises:
        ImportError: If `numpy` is not installed.
    """
    if not is_numpy_available():
        # Fixed grammar of the error message ("to use deal with" -> "to deal with").
        raise ImportError("Please install numpy to deal with embeddings (`pip install numpy`).")
    import numpy

    return numpy
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
def _import_pil_image():
    """Return PIL's `Image` module, making sure `Pillow` is installed on the machine.

    Raises:
        ImportError: If `Pillow` is not installed.
    """
    if not is_pillow_available():
        # Fixed grammar of the error message ("to use deal with" -> "to deal with").
        raise ImportError(
            "Please install Pillow to deal with images (`pip install Pillow`). If you don't want the image to be"
            " post-processed, use `client.post(...)` and get the raw response from the server."
        )
    from PIL import Image

    return Image
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
## ENCODING / DECODING UTILS
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
@overload
def _open_as_binary(
    content: ContentT,
) -> ContextManager[BinaryT]: ...  # means "if input is not None, output is not None"


@overload
def _open_as_binary(
    content: Literal[None],
) -> ContextManager[Literal[None]]: ...  # means "if input is None, output is None"


@contextmanager  # type: ignore
def _open_as_binary(content: Optional[ContentT]) -> Generator[Optional[BinaryT], None, None]:
    """Open `content` as a binary file, either from a URL, a local path, or raw bytes.

    Do nothing if `content` is None,

    TODO: handle base64 as input
    """
    # If content is a string => must be either a URL or a path
    if isinstance(content, str):
        if content.startswith("https://") or content.startswith("http://"):
            logger.debug(f"Downloading content from {content}")
            yield get_session().get(content).content  # TODO: retrieve as stream and pipe to post request ?
            return
        # Not a URL => treat the string as a filesystem path from here on.
        content = Path(content)
        if not content.exists():
            raise FileNotFoundError(
                f"File not found at {content}. If `data` is a string, it must either be a URL or a path to a local"
                " file. To pass raw content, please encode it as bytes first."
            )

    # If content is a Path => open it
    if isinstance(content, Path):
        logger.debug(f"Opening content from {content}")
        with content.open("rb") as f:
            yield f
    elif hasattr(content, "save"):  # PIL Image
        # Duck-typed check: anything with a `.save` method is treated as a PIL Image
        # and serialized to an in-memory PNG buffer.
        logger.debug("Converting PIL Image to bytes")
        buffer = io.BytesIO()
        content.save(buffer, format="PNG")
        buffer.seek(0)
        yield buffer
    else:
        # Otherwise: already a file-like object or None
        yield content
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def _b64_encode(content: ContentT) -> str:
    """Encode a raw file (image, audio) into base64. Can be bytes, an opened file, a path or a URL."""
    with _open_as_binary(content) as binary:
        raw = binary if isinstance(binary, bytes) else binary.read()
        return base64.b64encode(raw).decode()
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
def _b64_to_image(encoded_image: str) -> "Image":
    """Parse a base64-encoded string into a PIL Image."""
    pil_image_module = _import_pil_image()
    raw_bytes = base64.b64decode(encoded_image)
    return pil_image_module.open(io.BytesIO(raw_bytes))
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
def _bytes_to_list(content: bytes) -> List:
|
| 212 |
+
"""Parse bytes from a Response object into a Python list.
|
| 213 |
+
|
| 214 |
+
Expects the response body to be JSON-encoded data.
|
| 215 |
+
|
| 216 |
+
NOTE: This is exactly the same implementation as `_bytes_to_dict` and will not complain if the returned data is a
|
| 217 |
+
dictionary. The only advantage of having both is to help the user (and mypy) understand what kind of data to expect.
|
| 218 |
+
"""
|
| 219 |
+
return json.loads(content.decode())
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
def _bytes_to_dict(content: bytes) -> Dict:
|
| 223 |
+
"""Parse bytes from a Response object into a Python dictionary.
|
| 224 |
+
|
| 225 |
+
Expects the response body to be JSON-encoded data.
|
| 226 |
+
|
| 227 |
+
NOTE: This is exactly the same implementation as `_bytes_to_list` and will not complain if the returned data is a
|
| 228 |
+
list. The only advantage of having both is to help the user (and mypy) understand what kind of data to expect.
|
| 229 |
+
"""
|
| 230 |
+
return json.loads(content.decode())
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def _bytes_to_image(content: bytes) -> "Image":
    """Parse bytes from a Response object into a PIL Image.

    Expects the response body to be raw bytes. To deal with b64 encoded images, use `_b64_to_image` instead.
    """
    pil_image_module = _import_pil_image()
    return pil_image_module.open(io.BytesIO(content))
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
def _as_dict(response: Union[bytes, Dict]) -> Dict:
|
| 243 |
+
return json.loads(response) if isinstance(response, bytes) else response
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
## PAYLOAD UTILS
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
## STREAMING UTILS
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
def _stream_text_generation_response(
    bytes_output_as_lines: Iterable[bytes], details: bool
) -> Union[Iterable[str], Iterable[TextGenerationStreamOutput]]:
    """Used in `InferenceClient.text_generation`.

    Yields parsed stream outputs (plain text if `details=False`) for each SSE line,
    stopping when the server sends the `[DONE]` sentinel.
    """
    # Parse ServerSentEvents
    for byte_payload in bytes_output_as_lines:
        try:
            output = _format_text_generation_stream_output(byte_payload, details)
        except StopIteration:
            # `[DONE]` sentinel received: end of stream.
            break
        if output is not None:
            # `None` means the line was not a data payload (e.g. an empty keep-alive line).
            yield output
|
| 264 |
+
|
| 265 |
+
|
| 266 |
+
async def _async_stream_text_generation_response(
    bytes_output_as_lines: AsyncIterable[bytes], details: bool
) -> Union[AsyncIterable[str], AsyncIterable[TextGenerationStreamOutput]]:
    """Used in `AsyncInferenceClient.text_generation`."""
    # Each line is a ServerSentEvent; parse one at a time.
    async for raw_line in bytes_output_as_lines:
        try:
            parsed = _format_text_generation_stream_output(raw_line, details)
        except StopIteration:
            # "[DONE]" sentinel received => end of stream.
            return
        if parsed is not None:
            yield parsed
|
| 278 |
+
|
| 279 |
+
|
| 280 |
+
def _format_text_generation_stream_output(
|
| 281 |
+
byte_payload: bytes, details: bool
|
| 282 |
+
) -> Optional[Union[str, TextGenerationStreamOutput]]:
|
| 283 |
+
if not byte_payload.startswith(b"data:"):
|
| 284 |
+
return None # empty line
|
| 285 |
+
|
| 286 |
+
if byte_payload.strip() == b"data: [DONE]":
|
| 287 |
+
raise StopIteration("[DONE] signal received.")
|
| 288 |
+
|
| 289 |
+
# Decode payload
|
| 290 |
+
payload = byte_payload.decode("utf-8")
|
| 291 |
+
json_payload = json.loads(payload.lstrip("data:").rstrip("/n"))
|
| 292 |
+
|
| 293 |
+
# Either an error as being returned
|
| 294 |
+
if json_payload.get("error") is not None:
|
| 295 |
+
raise _parse_text_generation_error(json_payload["error"], json_payload.get("error_type"))
|
| 296 |
+
|
| 297 |
+
# Or parse token payload
|
| 298 |
+
output = TextGenerationStreamOutput.parse_obj_as_instance(json_payload)
|
| 299 |
+
return output.token.text if not details else output
|
| 300 |
+
|
| 301 |
+
|
| 302 |
+
def _stream_chat_completion_response(
    bytes_lines: Iterable[bytes],
) -> Iterable[ChatCompletionStreamOutput]:
    """Used in `InferenceClient.chat_completion` if model is served with TGI."""
    for raw_line in bytes_lines:
        try:
            parsed = _format_chat_completion_stream_output(raw_line)
        except StopIteration:
            # "[DONE]" sentinel received => end of stream.
            return
        if parsed is not None:
            yield parsed
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
async def _async_stream_chat_completion_response(
    bytes_lines: AsyncIterable[bytes],
) -> AsyncIterable[ChatCompletionStreamOutput]:
    """Used in `AsyncInferenceClient.chat_completion`."""
    async for raw_line in bytes_lines:
        try:
            parsed = _format_chat_completion_stream_output(raw_line)
        except StopIteration:
            # "[DONE]" sentinel received => end of stream.
            return
        if parsed is not None:
            yield parsed
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
def _format_chat_completion_stream_output(
|
| 329 |
+
byte_payload: bytes,
|
| 330 |
+
) -> Optional[ChatCompletionStreamOutput]:
|
| 331 |
+
if not byte_payload.startswith(b"data:"):
|
| 332 |
+
return None # empty line
|
| 333 |
+
|
| 334 |
+
if byte_payload.strip() == b"data: [DONE]":
|
| 335 |
+
raise StopIteration("[DONE] signal received.")
|
| 336 |
+
|
| 337 |
+
# Decode payload
|
| 338 |
+
payload = byte_payload.decode("utf-8")
|
| 339 |
+
json_payload = json.loads(payload.lstrip("data:").rstrip("/n"))
|
| 340 |
+
|
| 341 |
+
# Either an error as being returned
|
| 342 |
+
if json_payload.get("error") is not None:
|
| 343 |
+
raise _parse_text_generation_error(json_payload["error"], json_payload.get("error_type"))
|
| 344 |
+
|
| 345 |
+
# Or parse token payload
|
| 346 |
+
return ChatCompletionStreamOutput.parse_obj_as_instance(json_payload)
|
| 347 |
+
|
| 348 |
+
|
| 349 |
+
async def _async_yield_from(client: "ClientSession", response: "ClientResponse") -> AsyncIterable[bytes]:
    """Yield stripped lines from an aiohttp streamed response, closing the session when done.

    The `finally` clause guarantees the `ClientSession` is closed even when the consumer stops
    iterating early (e.g. `break` or an exception): previously `client.close()` was only reached
    after full exhaustion of the stream, leaking the session otherwise.
    """
    try:
        async for byte_payload in response.content:
            yield byte_payload.strip()
    finally:
        await client.close()
|
| 353 |
+
|
| 354 |
+
|
| 355 |
+
# "TGI servers" are servers running with the `text-generation-inference` backend.
|
| 356 |
+
# This backend is the go-to solution to run large language models at scale. However,
|
| 357 |
+
# for some smaller models (e.g. "gpt2") the default `transformers` + `api-inference`
|
| 358 |
+
# solution is still in use.
|
| 359 |
+
#
|
| 360 |
+
# Both approaches have very similar APIs, but not exactly the same. What we do first in
|
| 361 |
+
# the `text_generation` method is to assume the model is served via TGI. If we realize
|
| 362 |
+
# it's not the case (i.e. we receive an HTTP 400 Bad Request), we fallback to the
|
| 363 |
+
# default API with a warning message. When that's the case, we remember the unsupported
|
| 364 |
+
# attributes for this model in the `_UNSUPPORTED_TEXT_GENERATION_KWARGS` global variable.
|
| 365 |
+
#
|
| 366 |
+
# In addition, TGI servers have a built-in API route for chat-completion, which is not
|
| 367 |
+
# available on the default API. We use this route to provide a more consistent behavior
|
| 368 |
+
# when available.
|
| 369 |
+
#
|
| 370 |
+
# For more details, see https://github.com/huggingface/text-generation-inference and
|
| 371 |
+
# https://huggingface.co/docs/api-inference/detailed_parameters#text-generation-task.
|
| 372 |
+
|
| 373 |
+
_UNSUPPORTED_TEXT_GENERATION_KWARGS: Dict[Optional[str], List[str]] = {}
|
| 374 |
+
|
| 375 |
+
|
| 376 |
+
def _set_unsupported_text_generation_kwargs(model: Optional[str], unsupported_kwargs: List[str]) -> None:
    """Record kwargs that `model`'s server rejected for text-generation calls."""
    known_kwargs = _UNSUPPORTED_TEXT_GENERATION_KWARGS.setdefault(model, [])
    known_kwargs.extend(unsupported_kwargs)
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
def _get_unsupported_text_generation_kwargs(model: Optional[str]) -> List[str]:
    """Return the kwargs previously recorded as unsupported for `model` (empty list if none)."""
    try:
        return _UNSUPPORTED_TEXT_GENERATION_KWARGS[model]
    except KeyError:
        return []
|
| 382 |
+
|
| 383 |
+
|
| 384 |
+
# TEXT GENERATION ERRORS
|
| 385 |
+
# ----------------------
|
| 386 |
+
# Text-generation errors are parsed separately to handle as much as possible the errors returned by the text generation
|
| 387 |
+
# inference project (https://github.com/huggingface/text-generation-inference).
|
| 388 |
+
# ----------------------
|
| 389 |
+
|
| 390 |
+
|
| 391 |
+
def raise_text_generation_error(http_error: HTTPError) -> NoReturn:
    """
    Try to parse a text-generation-inference error payload and raise a more specific exception;
    re-raise the original HTTPError when no extra information is available.

    Args:
        http_error (`HTTPError`):
            The HTTPError that has been raised.
    """
    try:
        # Hacky way to retrieve payload in case of aiohttp error
        payload = getattr(http_error, "response_error_payload", None) or http_error.response.json()
        error = payload.get("error")
        error_type = payload.get("error_type")
    except Exception:  # no parsable payload => re-raise the original error
        raise http_error

    if error_type is None:
        # No more information than what `hf_raise_for_status` already provides: fall back.
        raise http_error

    # `error_type` gives more information than `hf_raise_for_status` => raise a dedicated exception
    raise _parse_text_generation_error(error, error_type) from http_error
|
| 416 |
+
|
| 417 |
+
|
| 418 |
+
def _parse_text_generation_error(error: Optional[str], error_type: Optional[str]) -> TextGenerationError:
    """Map a (message, error_type) pair returned by a TGI server to the matching exception class."""
    exception_classes = {
        "generation": GenerationError,
        "incomplete_generation": IncompleteGenerationError,
        "overloaded": OverloadedError,
        "validation": ValidationError,
    }
    exception_class = exception_classes.get(error_type, UnknownError)
    return exception_class(error)  # type: ignore
|
.venv/lib/python3.13/site-packages/huggingface_hub/inference/_generated/__init__.py
ADDED
|
File without changes
|