ZTWHHH commited on
Commit
e2eea08
·
verified ·
1 Parent(s): 0b59cdb

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/_commit_scheduler.cpython-310.pyc +0 -0
  2. parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/_local_folder.cpython-310.pyc +0 -0
  3. parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/_tensorboard_logger.cpython-310.pyc +0 -0
  4. parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/_webhooks_server.cpython-310.pyc +0 -0
  5. parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/constants.cpython-310.pyc +0 -0
  6. parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/file_download.cpython-310.pyc +0 -0
  7. parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/hf_file_system.cpython-310.pyc +0 -0
  8. parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/inference_api.cpython-310.pyc +0 -0
  9. parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/keras_mixin.cpython-310.pyc +0 -0
  10. parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/__init__.cpython-310.pyc +0 -0
  11. parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/delete_cache.cpython-310.pyc +0 -0
  12. parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/download.cpython-310.pyc +0 -0
  13. parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/env.cpython-310.pyc +0 -0
  14. parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/lfs.cpython-310.pyc +0 -0
  15. parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/repo_files.cpython-310.pyc +0 -0
  16. parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/scan_cache.cpython-310.pyc +0 -0
  17. parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/tag.cpython-310.pyc +0 -0
  18. parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/upload.cpython-310.pyc +0 -0
  19. parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/upload_large_folder.cpython-310.pyc +0 -0
  20. parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/user.cpython-310.pyc +0 -0
  21. parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/version.cpython-310.pyc +0 -0
  22. parrot/lib/python3.10/site-packages/huggingface_hub/commands/_cli_utils.py +69 -0
  23. parrot/lib/python3.10/site-packages/huggingface_hub/commands/delete_cache.py +428 -0
  24. parrot/lib/python3.10/site-packages/huggingface_hub/commands/lfs.py +200 -0
  25. parrot/lib/python3.10/site-packages/huggingface_hub/commands/repo_files.py +128 -0
  26. parrot/lib/python3.10/site-packages/huggingface_hub/commands/tag.py +159 -0
  27. parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/audio_to_audio.cpython-310.pyc +0 -0
  28. parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/base.cpython-310.pyc +0 -0
  29. parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_classification.cpython-310.pyc +0 -0
  30. parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_to_image.cpython-310.pyc +0 -0
  31. parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/object_detection.cpython-310.pyc +0 -0
  32. parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/table_question_answering.cpython-310.pyc +0 -0
  33. parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/token_classification.cpython-310.pyc +0 -0
  34. parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_image_classification.cpython-310.pyc +0 -0
  35. parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_object_detection.cpython-310.pyc +0 -0
  36. parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/chat_completion.py +301 -0
  37. parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/depth_estimation.py +28 -0
  38. parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/image_to_text.py +101 -0
  39. parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/sentence_similarity.py +27 -0
  40. parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/token_classification.py +51 -0
  41. parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  42. parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_auth.cpython-310.pyc +0 -0
  43. parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_cache_manager.cpython-310.pyc +0 -0
  44. parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_chunk_utils.cpython-310.pyc +0 -0
  45. parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_datetime.cpython-310.pyc +0 -0
  46. parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_deprecation.cpython-310.pyc +0 -0
  47. parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_experimental.cpython-310.pyc +0 -0
  48. parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_fixes.cpython-310.pyc +0 -0
  49. parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_http.cpython-310.pyc +0 -0
  50. parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_lfs.cpython-310.pyc +0 -0
parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/_commit_scheduler.cpython-310.pyc ADDED
Binary file (13.8 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/_local_folder.cpython-310.pyc ADDED
Binary file (12 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/_tensorboard_logger.cpython-310.pyc ADDED
Binary file (6.86 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/_webhooks_server.cpython-310.pyc ADDED
Binary file (13.9 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/constants.cpython-310.pyc ADDED
Binary file (4.96 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/file_download.cpython-310.pyc ADDED
Binary file (44.4 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/hf_file_system.cpython-310.pyc ADDED
Binary file (33.3 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/inference_api.cpython-310.pyc ADDED
Binary file (7.57 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/__pycache__/keras_mixin.cpython-310.pyc ADDED
Binary file (17 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (815 Bytes). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/delete_cache.cpython-310.pyc ADDED
Binary file (13.5 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/download.cpython-310.pyc ADDED
Binary file (5.65 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/env.cpython-310.pyc ADDED
Binary file (1.22 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/lfs.cpython-310.pyc ADDED
Binary file (5.77 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/repo_files.cpython-310.pyc ADDED
Binary file (4.03 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/scan_cache.cpython-310.pyc ADDED
Binary file (7.73 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/tag.cpython-310.pyc ADDED
Binary file (5.82 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/upload.cpython-310.pyc ADDED
Binary file (8.02 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/upload_large_folder.cpython-310.pyc ADDED
Binary file (4.78 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/user.cpython-310.pyc ADDED
Binary file (10.1 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/commands/__pycache__/version.cpython-310.pyc ADDED
Binary file (1.26 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/commands/_cli_utils.py ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2022 The HuggingFace Team. All rights reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Contains a utility for good-looking prints."""
15
+
16
+ import os
17
+ from typing import List, Union
18
+
19
+
20
+ class ANSI:
21
+ """
22
+ Helper for en.wikipedia.org/wiki/ANSI_escape_code
23
+ """
24
+
25
+ _bold = "\u001b[1m"
26
+ _gray = "\u001b[90m"
27
+ _red = "\u001b[31m"
28
+ _reset = "\u001b[0m"
29
+ _yellow = "\u001b[33m"
30
+
31
+ @classmethod
32
+ def bold(cls, s: str) -> str:
33
+ return cls._format(s, cls._bold)
34
+
35
+ @classmethod
36
+ def gray(cls, s: str) -> str:
37
+ return cls._format(s, cls._gray)
38
+
39
+ @classmethod
40
+ def red(cls, s: str) -> str:
41
+ return cls._format(s, cls._bold + cls._red)
42
+
43
+ @classmethod
44
+ def yellow(cls, s: str) -> str:
45
+ return cls._format(s, cls._yellow)
46
+
47
+ @classmethod
48
+ def _format(cls, s: str, code: str) -> str:
49
+ if os.environ.get("NO_COLOR"):
50
+ # See https://no-color.org/
51
+ return s
52
+ return f"{code}{s}{cls._reset}"
53
+
54
+
55
+ def tabulate(rows: List[List[Union[str, int]]], headers: List[str]) -> str:
56
+ """
57
+ Inspired by:
58
+
59
+ - stackoverflow.com/a/8356620/593036
60
+ - stackoverflow.com/questions/9535954/printing-lists-as-tabular-data
61
+ """
62
+ col_widths = [max(len(str(x)) for x in col) for col in zip(*rows, headers)]
63
+ row_format = ("{{:{}}} " * len(headers)).format(*col_widths)
64
+ lines = []
65
+ lines.append(row_format.format(*headers))
66
+ lines.append(row_format.format(*["-" * w for w in col_widths]))
67
+ for row in rows:
68
+ lines.append(row_format.format(*row))
69
+ return "\n".join(lines)
parrot/lib/python3.10/site-packages/huggingface_hub/commands/delete_cache.py ADDED
@@ -0,0 +1,428 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2022-present, the HuggingFace Inc. team.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Contains command to delete some revisions from the HF cache directory.
16
+
17
+ Usage:
18
+ huggingface-cli delete-cache
19
+ huggingface-cli delete-cache --disable-tui
20
+ huggingface-cli delete-cache --dir ~/.cache/huggingface/hub
21
+
22
+ NOTE:
23
+ This command is based on `InquirerPy` to build the multiselect menu in the terminal.
24
+ This dependency has to be installed with `pip install huggingface_hub[cli]`. Since
25
+ we want to avoid as much as possible cross-platform issues, I chose a library that
26
+ is built on top of `python-prompt-toolkit` which seems to be a reference in terminal
27
+ GUI (actively maintained on both Unix and Windows, 7.9k stars).
28
+
29
+ For the moment, the TUI feature is in beta.
30
+
31
+ See:
32
+ - https://github.com/kazhala/InquirerPy
33
+ - https://inquirerpy.readthedocs.io/en/latest/
34
+ - https://github.com/prompt-toolkit/python-prompt-toolkit
35
+
36
+ Other solutions could have been:
37
+ - `simple_term_menu`: would be good as well for our use case but some issues suggest
38
+ that Windows is less supported.
39
+ See: https://github.com/IngoMeyer441/simple-term-menu
40
+ - `PyInquirer`: very similar to `InquirerPy` but older and not maintained anymore.
41
+ In particular, no support of Python3.10.
42
+ See: https://github.com/CITGuru/PyInquirer
43
+ - `pick` (or `pickpack`): easy to use and flexible but built on top of Python's
44
+ standard library `curses` that is specific to Unix (not implemented on Windows).
45
+ See https://github.com/wong2/pick and https://github.com/anafvana/pickpack.
46
+ - `inquirer`: lot of traction (700 stars) but explicitly states "experimental
47
+ support of Windows". Not built on top of `python-prompt-toolkit`.
48
+ See https://github.com/magmax/python-inquirer
49
+
50
+ TODO: add support for `huggingface-cli delete-cache aaaaaa bbbbbb cccccc (...)` ?
51
+ TODO: add "--keep-last" arg to delete revisions that are not on `main` ref
52
+ TODO: add "--filter" arg to filter repositories by name ?
53
+ TODO: add "--sort" arg to sort by size ?
54
+ TODO: add "--limit" arg to limit to X repos ?
55
+ TODO: add "-y" arg for immediate deletion ?
56
+ See discussions in https://github.com/huggingface/huggingface_hub/issues/1025.
57
+ """
58
+
59
+ import os
60
+ from argparse import Namespace, _SubParsersAction
61
+ from functools import wraps
62
+ from tempfile import mkstemp
63
+ from typing import Any, Callable, Iterable, List, Optional, Union
64
+
65
+ from ..utils import CachedRepoInfo, CachedRevisionInfo, HFCacheInfo, scan_cache_dir
66
+ from . import BaseHuggingfaceCLICommand
67
+ from ._cli_utils import ANSI
68
+
69
+
70
+ try:
71
+ from InquirerPy import inquirer
72
+ from InquirerPy.base.control import Choice
73
+ from InquirerPy.separator import Separator
74
+
75
+ _inquirer_py_available = True
76
+ except ImportError:
77
+ _inquirer_py_available = False
78
+
79
+
80
+ def require_inquirer_py(fn: Callable) -> Callable:
81
+ """Decorator to flag methods that require `InquirerPy`."""
82
+
83
+ # TODO: refactor this + imports in a unified pattern across codebase
84
+ @wraps(fn)
85
+ def _inner(*args, **kwargs):
86
+ if not _inquirer_py_available:
87
+ raise ImportError(
88
+ "The `delete-cache` command requires extra dependencies to work with"
89
+ " the TUI.\nPlease run `pip install huggingface_hub[cli]` to install"
90
+ " them.\nOtherwise, disable TUI using the `--disable-tui` flag."
91
+ )
92
+
93
+ return fn(*args, **kwargs)
94
+
95
+ return _inner
96
+
97
+
98
+ # Possibility for the user to cancel deletion
99
+ _CANCEL_DELETION_STR = "CANCEL_DELETION"
100
+
101
+
102
+ class DeleteCacheCommand(BaseHuggingfaceCLICommand):
103
+ @staticmethod
104
+ def register_subcommand(parser: _SubParsersAction):
105
+ delete_cache_parser = parser.add_parser("delete-cache", help="Delete revisions from the cache directory.")
106
+
107
+ delete_cache_parser.add_argument(
108
+ "--dir",
109
+ type=str,
110
+ default=None,
111
+ help="cache directory (optional). Default to the default HuggingFace cache.",
112
+ )
113
+
114
+ delete_cache_parser.add_argument(
115
+ "--disable-tui",
116
+ action="store_true",
117
+ help=(
118
+ "Disable Terminal User Interface (TUI) mode. Useful if your"
119
+ " platform/terminal doesn't support the multiselect menu."
120
+ ),
121
+ )
122
+
123
+ delete_cache_parser.set_defaults(func=DeleteCacheCommand)
124
+
125
+ def __init__(self, args: Namespace) -> None:
126
+ self.cache_dir: Optional[str] = args.dir
127
+ self.disable_tui: bool = args.disable_tui
128
+
129
+ def run(self):
130
+ """Run `delete-cache` command with or without TUI."""
131
+ # Scan cache directory
132
+ hf_cache_info = scan_cache_dir(self.cache_dir)
133
+
134
+ # Manual review from the user
135
+ if self.disable_tui:
136
+ selected_hashes = _manual_review_no_tui(hf_cache_info, preselected=[])
137
+ else:
138
+ selected_hashes = _manual_review_tui(hf_cache_info, preselected=[])
139
+
140
+ # If deletion is not cancelled
141
+ if len(selected_hashes) > 0 and _CANCEL_DELETION_STR not in selected_hashes:
142
+ confirm_message = _get_expectations_str(hf_cache_info, selected_hashes) + " Confirm deletion ?"
143
+
144
+ # Confirm deletion
145
+ if self.disable_tui:
146
+ confirmed = _ask_for_confirmation_no_tui(confirm_message)
147
+ else:
148
+ confirmed = _ask_for_confirmation_tui(confirm_message)
149
+
150
+ # Deletion is confirmed
151
+ if confirmed:
152
+ strategy = hf_cache_info.delete_revisions(*selected_hashes)
153
+ print("Start deletion.")
154
+ strategy.execute()
155
+ print(
156
+ f"Done. Deleted {len(strategy.repos)} repo(s) and"
157
+ f" {len(strategy.snapshots)} revision(s) for a total of"
158
+ f" {strategy.expected_freed_size_str}."
159
+ )
160
+ return
161
+
162
+ # Deletion is cancelled
163
+ print("Deletion is cancelled. Do nothing.")
164
+
165
+
166
+ @require_inquirer_py
167
+ def _manual_review_tui(hf_cache_info: HFCacheInfo, preselected: List[str]) -> List[str]:
168
+ """Ask the user for a manual review of the revisions to delete.
169
+
170
+ Displays a multi-select menu in the terminal (TUI).
171
+ """
172
+ # Define multiselect list
173
+ choices = _get_tui_choices_from_scan(repos=hf_cache_info.repos, preselected=preselected)
174
+ checkbox = inquirer.checkbox(
175
+ message="Select revisions to delete:",
176
+ choices=choices, # List of revisions with some pre-selection
177
+ cycle=False, # No loop between top and bottom
178
+ height=100, # Large list if possible
179
+ # We use the instruction to display to the user the expected effect of the
180
+ # deletion.
181
+ instruction=_get_expectations_str(
182
+ hf_cache_info,
183
+ selected_hashes=[c.value for c in choices if isinstance(c, Choice) and c.enabled],
184
+ ),
185
+ # We use the long instruction to should keybindings instructions to the user
186
+ long_instruction="Press <space> to select, <enter> to validate and <ctrl+c> to quit without modification.",
187
+ # Message that is displayed once the user validates its selection.
188
+ transformer=lambda result: f"{len(result)} revision(s) selected.",
189
+ )
190
+
191
+ # Add a callback to update the information line when a revision is
192
+ # selected/unselected
193
+ def _update_expectations(_) -> None:
194
+ # Hacky way to dynamically set an instruction message to the checkbox when
195
+ # a revision hash is selected/unselected.
196
+ checkbox._instruction = _get_expectations_str(
197
+ hf_cache_info,
198
+ selected_hashes=[choice["value"] for choice in checkbox.content_control.choices if choice["enabled"]],
199
+ )
200
+
201
+ checkbox.kb_func_lookup["toggle"].append({"func": _update_expectations})
202
+
203
+ # Finally display the form to the user.
204
+ try:
205
+ return checkbox.execute()
206
+ except KeyboardInterrupt:
207
+ return [] # Quit without deletion
208
+
209
+
210
+ @require_inquirer_py
211
+ def _ask_for_confirmation_tui(message: str, default: bool = True) -> bool:
212
+ """Ask for confirmation using Inquirer."""
213
+ return inquirer.confirm(message, default=default).execute()
214
+
215
+
216
+ def _get_tui_choices_from_scan(repos: Iterable[CachedRepoInfo], preselected: List[str]) -> List:
217
+ """Build a list of choices from the scanned repos.
218
+
219
+ Args:
220
+ repos (*Iterable[`CachedRepoInfo`]*):
221
+ List of scanned repos on which we want to delete revisions.
222
+ preselected (*List[`str`]*):
223
+ List of revision hashes that will be preselected.
224
+
225
+ Return:
226
+ The list of choices to pass to `inquirer.checkbox`.
227
+ """
228
+ choices: List[Union[Choice, Separator]] = []
229
+
230
+ # First choice is to cancel the deletion. If selected, nothing will be deleted,
231
+ # no matter the other selected items.
232
+ choices.append(
233
+ Choice(
234
+ _CANCEL_DELETION_STR,
235
+ name="None of the following (if selected, nothing will be deleted).",
236
+ enabled=False,
237
+ )
238
+ )
239
+
240
+ # Display a separator per repo and a Choice for each revisions of the repo
241
+ for repo in sorted(repos, key=_repo_sorting_order):
242
+ # Repo as separator
243
+ choices.append(
244
+ Separator(
245
+ f"\n{repo.repo_type.capitalize()} {repo.repo_id} ({repo.size_on_disk_str},"
246
+ f" used {repo.last_accessed_str})"
247
+ )
248
+ )
249
+ for revision in sorted(repo.revisions, key=_revision_sorting_order):
250
+ # Revision as choice
251
+ choices.append(
252
+ Choice(
253
+ revision.commit_hash,
254
+ name=(
255
+ f"{revision.commit_hash[:8]}:"
256
+ f" {', '.join(sorted(revision.refs)) or '(detached)'} #"
257
+ f" modified {revision.last_modified_str}"
258
+ ),
259
+ enabled=revision.commit_hash in preselected,
260
+ )
261
+ )
262
+
263
+ # Return choices
264
+ return choices
265
+
266
+
267
+ def _manual_review_no_tui(hf_cache_info: HFCacheInfo, preselected: List[str]) -> List[str]:
268
+ """Ask the user for a manual review of the revisions to delete.
269
+
270
+ Used when TUI is disabled. Manual review happens in a separate tmp file that the
271
+ user can manually edit.
272
+ """
273
+ # 1. Generate temporary file with delete commands.
274
+ fd, tmp_path = mkstemp(suffix=".txt") # suffix to make it easier to find by editors
275
+ os.close(fd)
276
+
277
+ lines = []
278
+ for repo in sorted(hf_cache_info.repos, key=_repo_sorting_order):
279
+ lines.append(
280
+ f"\n# {repo.repo_type.capitalize()} {repo.repo_id} ({repo.size_on_disk_str},"
281
+ f" used {repo.last_accessed_str})"
282
+ )
283
+ for revision in sorted(repo.revisions, key=_revision_sorting_order):
284
+ lines.append(
285
+ # Deselect by prepending a '#'
286
+ f"{'' if revision.commit_hash in preselected else '#'} "
287
+ f" {revision.commit_hash} # Refs:"
288
+ # Print `refs` as comment on same line
289
+ f" {', '.join(sorted(revision.refs)) or '(detached)'} # modified"
290
+ # Print `last_modified` as comment on same line
291
+ f" {revision.last_modified_str}"
292
+ )
293
+
294
+ with open(tmp_path, "w") as f:
295
+ f.write(_MANUAL_REVIEW_NO_TUI_INSTRUCTIONS)
296
+ f.write("\n".join(lines))
297
+
298
+ # 2. Prompt instructions to user.
299
+ instructions = f"""
300
+ TUI is disabled. In order to select which revisions you want to delete, please edit
301
+ the following file using the text editor of your choice. Instructions for manual
302
+ editing are located at the beginning of the file. Edit the file, save it and confirm
303
+ to continue.
304
+ File to edit: {ANSI.bold(tmp_path)}
305
+ """
306
+ print("\n".join(line.strip() for line in instructions.strip().split("\n")))
307
+
308
+ # 3. Wait for user confirmation.
309
+ while True:
310
+ selected_hashes = _read_manual_review_tmp_file(tmp_path)
311
+ if _ask_for_confirmation_no_tui(
312
+ _get_expectations_str(hf_cache_info, selected_hashes) + " Continue ?",
313
+ default=False,
314
+ ):
315
+ break
316
+
317
+ # 4. Return selected_hashes
318
+ os.remove(tmp_path)
319
+ return selected_hashes
320
+
321
+
322
+ def _ask_for_confirmation_no_tui(message: str, default: bool = True) -> bool:
323
+ """Ask for confirmation using pure-python."""
324
+ YES = ("y", "yes", "1")
325
+ NO = ("n", "no", "0")
326
+ DEFAULT = ""
327
+ ALL = YES + NO + (DEFAULT,)
328
+ full_message = message + (" (Y/n) " if default else " (y/N) ")
329
+ while True:
330
+ answer = input(full_message).lower()
331
+ if answer == DEFAULT:
332
+ return default
333
+ if answer in YES:
334
+ return True
335
+ if answer in NO:
336
+ return False
337
+ print(f"Invalid input. Must be one of {ALL}")
338
+
339
+
340
+ def _get_expectations_str(hf_cache_info: HFCacheInfo, selected_hashes: List[str]) -> str:
341
+ """Format a string to display to the user how much space would be saved.
342
+
343
+ Example:
344
+ ```
345
+ >>> _get_expectations_str(hf_cache_info, selected_hashes)
346
+ '7 revisions selected counting for 4.3G.'
347
+ ```
348
+ """
349
+ if _CANCEL_DELETION_STR in selected_hashes:
350
+ return "Nothing will be deleted."
351
+ strategy = hf_cache_info.delete_revisions(*selected_hashes)
352
+ return f"{len(selected_hashes)} revisions selected counting for {strategy.expected_freed_size_str}."
353
+
354
+
355
+ def _read_manual_review_tmp_file(tmp_path: str) -> List[str]:
356
+ """Read the manually reviewed instruction file and return a list of revision hash.
357
+
358
+ Example:
359
+ ```txt
360
+ # This is the tmp file content
361
+ ###
362
+
363
+ # Commented out line
364
+ 123456789 # revision hash
365
+
366
+ # Something else
367
+ # a_newer_hash # 2 days ago
368
+ an_older_hash # 3 days ago
369
+ ```
370
+
371
+ ```py
372
+ >>> _read_manual_review_tmp_file(tmp_path)
373
+ ['123456789', 'an_older_hash']
374
+ ```
375
+ """
376
+ with open(tmp_path) as f:
377
+ content = f.read()
378
+
379
+ # Split lines
380
+ lines = [line.strip() for line in content.split("\n")]
381
+
382
+ # Filter commented lines
383
+ selected_lines = [line for line in lines if not line.startswith("#")]
384
+
385
+ # Select only before comment
386
+ selected_hashes = [line.split("#")[0].strip() for line in selected_lines]
387
+
388
+ # Return revision hashes
389
+ return [hash for hash in selected_hashes if len(hash) > 0]
390
+
391
+
392
+ _MANUAL_REVIEW_NO_TUI_INSTRUCTIONS = f"""
393
+ # INSTRUCTIONS
394
+ # ------------
395
+ # This is a temporary file created by running `huggingface-cli delete-cache` with the
396
+ # `--disable-tui` option. It contains a set of revisions that can be deleted from your
397
+ # local cache directory.
398
+ #
399
+ # Please manually review the revisions you want to delete:
400
+ # - Revision hashes can be commented out with '#'.
401
+ # - Only non-commented revisions in this file will be deleted.
402
+ # - Revision hashes that are removed from this file are ignored as well.
403
+ # - If `{_CANCEL_DELETION_STR}` line is uncommented, the all cache deletion is cancelled and
404
+ # no changes will be applied.
405
+ #
406
+ # Once you've manually reviewed this file, please confirm deletion in the terminal. This
407
+ # file will be automatically removed once done.
408
+ # ------------
409
+
410
+ # KILL SWITCH
411
+ # ------------
412
+ # Un-comment following line to completely cancel the deletion process
413
+ # {_CANCEL_DELETION_STR}
414
+ # ------------
415
+
416
+ # REVISIONS
417
+ # ------------
418
+ """.strip()
419
+
420
+
421
+ def _repo_sorting_order(repo: CachedRepoInfo) -> Any:
422
+ # First split by Dataset/Model, then sort by last accessed (oldest first)
423
+ return (repo.repo_type, repo.last_accessed)
424
+
425
+
426
+ def _revision_sorting_order(revision: CachedRevisionInfo) -> Any:
427
+ # Sort by last modified (oldest first)
428
+ return revision.last_modified
parrot/lib/python3.10/site-packages/huggingface_hub/commands/lfs.py ADDED
@@ -0,0 +1,200 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Implementation of a custom transfer agent for the transfer type "multipart" for
3
+ git-lfs.
4
+
5
+ Inspired by:
6
+ github.com/cbartz/git-lfs-swift-transfer-agent/blob/master/git_lfs_swift_transfer.py
7
+
8
+ Spec is: github.com/git-lfs/git-lfs/blob/master/docs/custom-transfers.md
9
+
10
+
11
+ To launch debugger while developing:
12
+
13
+ ``` [lfs "customtransfer.multipart"]
14
+ path = /path/to/huggingface_hub/.env/bin/python args = -m debugpy --listen 5678
15
+ --wait-for-client
16
+ /path/to/huggingface_hub/src/huggingface_hub/commands/huggingface_cli.py
17
+ lfs-multipart-upload ```"""
18
+
19
+ import json
20
+ import os
21
+ import subprocess
22
+ import sys
23
+ from argparse import _SubParsersAction
24
+ from typing import Dict, List, Optional
25
+
26
+ from huggingface_hub.commands import BaseHuggingfaceCLICommand
27
+ from huggingface_hub.lfs import LFS_MULTIPART_UPLOAD_COMMAND
28
+
29
+ from ..utils import get_session, hf_raise_for_status, logging
30
+ from ..utils._lfs import SliceFileObj
31
+
32
+
33
+ logger = logging.get_logger(__name__)
34
+
35
+
36
+ class LfsCommands(BaseHuggingfaceCLICommand):
37
+ """
38
+ Implementation of a custom transfer agent for the transfer type "multipart"
39
+ for git-lfs. This lets users upload large files >5GB 🔥. Spec for LFS custom
40
+ transfer agent is:
41
+ https://github.com/git-lfs/git-lfs/blob/master/docs/custom-transfers.md
42
+
43
+ This introduces two commands to the CLI:
44
+
45
+ 1. $ huggingface-cli lfs-enable-largefiles
46
+
47
+ This should be executed once for each model repo that contains a model file
48
+ >5GB. It's documented in the error message you get if you just try to git
49
+ push a 5GB file without having enabled it before.
50
+
51
+ 2. $ huggingface-cli lfs-multipart-upload
52
+
53
+ This command is called by lfs directly and is not meant to be called by the
54
+ user.
55
+ """
56
+
57
+ @staticmethod
58
+ def register_subcommand(parser: _SubParsersAction):
59
+ enable_parser = parser.add_parser(
60
+ "lfs-enable-largefiles", help="Configure your repository to enable upload of files > 5GB."
61
+ )
62
+ enable_parser.add_argument("path", type=str, help="Local path to repository you want to configure.")
63
+ enable_parser.set_defaults(func=lambda args: LfsEnableCommand(args))
64
+
65
+ # Command will get called by git-lfs, do not call it directly.
66
+ upload_parser = parser.add_parser(LFS_MULTIPART_UPLOAD_COMMAND, add_help=False)
67
+ upload_parser.set_defaults(func=lambda args: LfsUploadCommand(args))
68
+
69
+
70
+ class LfsEnableCommand:
71
+ def __init__(self, args):
72
+ self.args = args
73
+
74
+ def run(self):
75
+ local_path = os.path.abspath(self.args.path)
76
+ if not os.path.isdir(local_path):
77
+ print("This does not look like a valid git repo.")
78
+ exit(1)
79
+ subprocess.run(
80
+ "git config lfs.customtransfer.multipart.path huggingface-cli".split(),
81
+ check=True,
82
+ cwd=local_path,
83
+ )
84
+ subprocess.run(
85
+ f"git config lfs.customtransfer.multipart.args {LFS_MULTIPART_UPLOAD_COMMAND}".split(),
86
+ check=True,
87
+ cwd=local_path,
88
+ )
89
+ print("Local repo set up for largefiles")
90
+
91
+
92
+ def write_msg(msg: Dict):
93
+ """Write out the message in Line delimited JSON."""
94
+ msg_str = json.dumps(msg) + "\n"
95
+ sys.stdout.write(msg_str)
96
+ sys.stdout.flush()
97
+
98
+
99
+ def read_msg() -> Optional[Dict]:
100
+ """Read Line delimited JSON from stdin."""
101
+ msg = json.loads(sys.stdin.readline().strip())
102
+
103
+ if "terminate" in (msg.get("type"), msg.get("event")):
104
+ # terminate message received
105
+ return None
106
+
107
+ if msg.get("event") not in ("download", "upload"):
108
+ logger.critical("Received unexpected message")
109
+ sys.exit(1)
110
+
111
+ return msg
112
+
113
+
114
+ class LfsUploadCommand:
115
+ def __init__(self, args) -> None:
116
+ self.args = args
117
+
118
+ def run(self) -> None:
119
+ # Immediately after invoking a custom transfer process, git-lfs
120
+ # sends initiation data to the process over stdin.
121
+ # This tells the process useful information about the configuration.
122
+ init_msg = json.loads(sys.stdin.readline().strip())
123
+ if not (init_msg.get("event") == "init" and init_msg.get("operation") == "upload"):
124
+ write_msg({"error": {"code": 32, "message": "Wrong lfs init operation"}})
125
+ sys.exit(1)
126
+
127
+ # The transfer process should use the information it needs from the
128
+ # initiation structure, and also perform any one-off setup tasks it
129
+ # needs to do. It should then respond on stdout with a simple empty
130
+ # confirmation structure, as follows:
131
+ write_msg({})
132
+
133
+ # After the initiation exchange, git-lfs will send any number of
134
+ # transfer requests to the stdin of the transfer process, in a serial sequence.
135
+ while True:
136
+ msg = read_msg()
137
+ if msg is None:
138
+ # When all transfers have been processed, git-lfs will send
139
+ # a terminate event to the stdin of the transfer process.
140
+ # On receiving this message the transfer process should
141
+ # clean up and terminate. No response is expected.
142
+ sys.exit(0)
143
+
144
+ oid = msg["oid"]
145
+ filepath = msg["path"]
146
+ completion_url = msg["action"]["href"]
147
+ header = msg["action"]["header"]
148
+ chunk_size = int(header.pop("chunk_size"))
149
+ presigned_urls: List[str] = list(header.values())
150
+
151
+ # Send a "started" progress event to allow other workers to start.
152
+ # Otherwise they're delayed until first "progress" event is reported,
153
+ # i.e. after the first 5GB by default (!)
154
+ write_msg(
155
+ {
156
+ "event": "progress",
157
+ "oid": oid,
158
+ "bytesSoFar": 1,
159
+ "bytesSinceLast": 0,
160
+ }
161
+ )
162
+
163
+ parts = []
164
+ with open(filepath, "rb") as file:
165
+ for i, presigned_url in enumerate(presigned_urls):
166
+ with SliceFileObj(
167
+ file,
168
+ seek_from=i * chunk_size,
169
+ read_limit=chunk_size,
170
+ ) as data:
171
+ r = get_session().put(presigned_url, data=data)
172
+ hf_raise_for_status(r)
173
+ parts.append(
174
+ {
175
+ "etag": r.headers.get("etag"),
176
+ "partNumber": i + 1,
177
+ }
178
+ )
179
+ # In order to support progress reporting while data is uploading / downloading,
180
+ # the transfer process should post messages to stdout
181
+ write_msg(
182
+ {
183
+ "event": "progress",
184
+ "oid": oid,
185
+ "bytesSoFar": (i + 1) * chunk_size,
186
+ "bytesSinceLast": chunk_size,
187
+ }
188
+ )
189
+ # Not precise but that's ok.
190
+
191
+ r = get_session().post(
192
+ completion_url,
193
+ json={
194
+ "oid": oid,
195
+ "parts": parts,
196
+ },
197
+ )
198
+ hf_raise_for_status(r)
199
+
200
+ write_msg({"event": "complete", "oid": oid})
parrot/lib/python3.10/site-packages/huggingface_hub/commands/repo_files.py ADDED
@@ -0,0 +1,128 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2023-present, the HuggingFace Inc. team.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Contains command to update or delete files in a repository using the CLI.
16
+
17
+ Usage:
18
+ # delete all
19
+ huggingface-cli repo-files <repo_id> delete "*"
20
+
21
+ # delete single file
22
+ huggingface-cli repo-files <repo_id> delete file.txt
23
+
24
+ # delete single folder
25
+ huggingface-cli repo-files <repo_id> delete folder/
26
+
27
+ # delete multiple
28
+ huggingface-cli repo-files <repo_id> delete file.txt folder/ file2.txt
29
+
30
+ # delete multiple patterns
31
+ huggingface-cli repo-files <repo_id> delete file.txt "*.json" "folder/*.parquet"
32
+
33
+ # delete from different revision / repo-type
34
+ huggingface-cli repo-files <repo_id> delete file.txt --revision=refs/pr/1 --repo-type=dataset
35
+ """
36
+
37
+ from argparse import _SubParsersAction
38
+ from typing import List, Optional
39
+
40
+ from huggingface_hub import logging
41
+ from huggingface_hub.commands import BaseHuggingfaceCLICommand
42
+ from huggingface_hub.hf_api import HfApi
43
+
44
+
45
+ logger = logging.get_logger(__name__)
46
+
47
+
48
class DeleteFilesSubCommand:
    """Handler for `huggingface-cli repo-files <repo_id> delete <patterns...>`.

    Copies the parsed CLI arguments onto the instance; `run()` then asks the
    Hub to delete every file matching the given glob patterns in one commit.
    """

    def __init__(self, args) -> None:
        self.args = args
        # Target repository coordinates.
        self.repo_id: str = args.repo_id
        self.repo_type: Optional[str] = args.repo_type
        self.revision: Optional[str] = args.revision
        # Hub client authenticated with the user-provided token (if any).
        self.api: HfApi = HfApi(token=args.token, library_name="huggingface-cli")
        # Glob patterns selecting which files to delete.
        self.patterns: List[str] = args.patterns
        # Commit metadata forwarded verbatim to the Hub.
        self.commit_message: Optional[str] = args.commit_message
        self.commit_description: Optional[str] = args.commit_description
        self.create_pr: bool = args.create_pr
        self.token: Optional[str] = args.token

    def run(self) -> None:
        """Delete the matching files on the Hub and print the resulting commit URL."""
        # Raise verbosity so the underlying HTTP activity is visible while the
        # command runs, then restore the quieter default afterwards.
        logging.set_verbosity_info()
        commit_url = self.api.delete_files(
            delete_patterns=self.patterns,
            repo_id=self.repo_id,
            repo_type=self.repo_type,
            revision=self.revision,
            commit_message=self.commit_message,
            commit_description=self.commit_description,
            create_pr=self.create_pr,
        )
        print(f"Files correctly deleted from repo. Commit: {commit_url}.")
        logging.set_verbosity_warning()
74
+
75
+
76
class RepoFilesCommand(BaseHuggingfaceCLICommand):
    """Registers the `repo-files` command and its `delete` sub-command on the CLI parser."""

    @staticmethod
    def register_subcommand(parser: _SubParsersAction):
        """Attach the `repo-files` parser tree to the top-level CLI sub-parsers.

        Args:
            parser: the `huggingface-cli` sub-parsers action to register under.
        """
        repo_files_parser = parser.add_parser("repo-files", help="Manage files in a repo on the Hub")
        repo_files_parser.add_argument(
            "repo_id", type=str, help="The ID of the repo to manage (e.g. `username/repo-name`)."
        )
        repo_files_subparsers = repo_files_parser.add_subparsers(
            help="Action to execute against the files.",
            required=True,
        )
        delete_subparser = repo_files_subparsers.add_parser(
            "delete",
            help="Delete files from a repo on the Hub",
        )
        # Sub-parser defaults take precedence over the parent's `set_defaults`
        # below, so `func` resolves to the delete handler when `delete` is used.
        delete_subparser.set_defaults(func=lambda args: DeleteFilesSubCommand(args))
        delete_subparser.add_argument(
            "patterns",
            nargs="+",
            type=str,
            help="Glob patterns to match files to delete.",
        )
        delete_subparser.add_argument(
            "--repo-type",
            choices=["model", "dataset", "space"],
            default="model",
            # Fixed copy-pasted help text: this sub-command deletes, it does not upload.
            help="Type of the repo to delete from (e.g. `dataset`).",
        )
        delete_subparser.add_argument(
            "--revision",
            type=str,
            help=(
                "An optional Git revision to commit to. It can be a branch name "
                "or a PR reference. If revision does not"
                " exist and `--create-pr` is not set, a branch will be automatically created."
            ),
        )
        delete_subparser.add_argument(
            "--commit-message", type=str, help="The summary / title / first line of the generated commit."
        )
        delete_subparser.add_argument(
            "--commit-description", type=str, help="The description of the generated commit."
        )
        delete_subparser.add_argument(
            "--create-pr", action="store_true", help="Whether to create a new Pull Request for these changes."
        )
        repo_files_parser.add_argument(
            "--token",
            type=str,
            help="A User Access Token generated from https://huggingface.co/settings/tokens",
        )

        repo_files_parser.set_defaults(func=RepoFilesCommand)
parrot/lib/python3.10/site-packages/huggingface_hub/commands/tag.py ADDED
@@ -0,0 +1,159 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2024-present, the HuggingFace Inc. team.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ """Contains commands to perform tag management with the CLI.
17
+
18
+ Usage Examples:
19
+ - Create a tag:
20
+ $ huggingface-cli tag user/my-model 1.0 --message "First release"
21
+ $ huggingface-cli tag user/my-model 1.0 -m "First release" --revision develop
22
+ $ huggingface-cli tag user/my-dataset 1.0 -m "First release" --repo-type dataset
23
+ $ huggingface-cli tag user/my-space 1.0
24
+ - List all tags:
25
+ $ huggingface-cli tag -l user/my-model
26
+ $ huggingface-cli tag --list user/my-dataset --repo-type dataset
27
+ - Delete a tag:
28
+ $ huggingface-cli tag -d user/my-model 1.0
29
+ $ huggingface-cli tag --delete user/my-dataset 1.0 --repo-type dataset
30
+ $ huggingface-cli tag -d user/my-space 1.0 -y
31
+ """
32
+
33
+ from argparse import Namespace, _SubParsersAction
34
+
35
+ from requests.exceptions import HTTPError
36
+
37
+ from huggingface_hub.commands import BaseHuggingfaceCLICommand
38
+ from huggingface_hub.constants import (
39
+ REPO_TYPES,
40
+ )
41
+ from huggingface_hub.hf_api import HfApi
42
+
43
+ from ..errors import HfHubHTTPError, RepositoryNotFoundError, RevisionNotFoundError
44
+ from ._cli_utils import ANSI
45
+
46
+
47
class TagCommands(BaseHuggingfaceCLICommand):
    """Registers the `tag` command (create / list / delete tags on a Hub repo)."""

    @staticmethod
    def register_subcommand(parser: _SubParsersAction):
        """Attach the `tag` parser to the top-level CLI sub-parsers.

        The action (create vs. list vs. delete) is selected later by
        `handle_commands` from the `-l` / `-d` flags; creation is the default.
        """
        tag_parser = parser.add_parser("tag", help="(create, list, delete) tags for a repo in the hub")

        # `tag` is optional (nargs="?") because `--list` does not need one.
        tag_parser.add_argument("repo_id", type=str, help="The ID of the repo to tag (e.g. `username/repo-name`).")
        tag_parser.add_argument("tag", nargs="?", type=str, help="The name of the tag for creation or deletion.")
        tag_parser.add_argument("-m", "--message", type=str, help="The description of the tag to create.")
        tag_parser.add_argument("--revision", type=str, help="The git revision to tag.")
        tag_parser.add_argument(
            "--token", type=str, help="A User Access Token generated from https://huggingface.co/settings/tokens."
        )
        tag_parser.add_argument(
            "--repo-type",
            choices=["model", "dataset", "space"],
            default="model",
            help="Set the type of repository (model, dataset, or space).",
        )
        # -y skips the interactive confirmation prompt used by the delete action.
        tag_parser.add_argument("-y", "--yes", action="store_true", help="Answer Yes to prompts automatically.")

        tag_parser.add_argument("-l", "--list", action="store_true", help="List tags for a repository.")
        tag_parser.add_argument("-d", "--delete", action="store_true", help="Delete a tag for a repository.")

        # Dispatch through handle_commands so one parser serves all three actions.
        tag_parser.set_defaults(func=lambda args: handle_commands(args))
71
+
72
+
73
def handle_commands(args: Namespace):
    """Pick the tag sub-command handler matching the parsed CLI flags.

    `--list` and `--delete` select their respective actions; when neither flag
    is present, tag creation is the default.
    """
    if args.list:
        return TagListCommand(args)
    if args.delete:
        return TagDeleteCommand(args)
    return TagCreateCommand(args)
80
+
81
+
82
class TagCommand:
    """Shared setup and validation for the tag sub-commands (create/list/delete)."""

    def __init__(self, args: Namespace):
        self.args = args
        self.api = HfApi(token=args.token)
        self.repo_id = args.repo_id
        self.repo_type = args.repo_type
        # Fail fast on an unknown repo type; REPO_TYPES is the canonical list.
        if self.repo_type not in REPO_TYPES:
            print("Invalid repo --repo-type")
            exit(1)
91
+
92
+
93
class TagCreateCommand(TagCommand):
    """Create a tag on a Hub repo, printing user-friendly errors on failure."""

    def run(self):
        tag = ANSI.bold(self.args.tag)
        repo = ANSI.bold(self.repo_id)
        print(f"You are about to create tag {tag} on {self.repo_type} {repo}")

        try:
            self.api.create_tag(
                repo_id=self.repo_id,
                tag=self.args.tag,
                tag_message=self.args.message,
                revision=self.args.revision,
                repo_type=self.repo_type,
            )
        except RepositoryNotFoundError:
            print(f"{self.repo_type.capitalize()} {repo} not found.")
            exit(1)
        except RevisionNotFoundError:
            print(f"Revision {ANSI.bold(self.args.revision)} not found.")
            exit(1)
        except HfHubHTTPError as e:
            # 409 Conflict: the tag already exists on the remote.
            if e.response.status_code == 409:
                print(f"Tag {tag} already exists on {repo}")
                exit(1)
            raise e

        print(f"Tag {tag} created on {repo}")
118
+
119
+
120
class TagListCommand(TagCommand):
    """List every tag of a Hub repo."""

    def run(self):
        try:
            refs = self.api.list_repo_refs(
                repo_id=self.repo_id,
                repo_type=self.repo_type,
            )
        except RepositoryNotFoundError:
            print(f"{self.repo_type.capitalize()} {ANSI.bold(self.repo_id)} not found.")
            exit(1)
        except HTTPError as e:
            # Surface the raw server response for unexpected HTTP failures.
            print(e)
            print(ANSI.red(e.response.text))
            exit(1)
        if not refs.tags:
            print("No tags found")
            exit(0)
        print(f"Tags for {self.repo_type} {ANSI.bold(self.repo_id)}:")
        for tag in refs.tags:
            print(tag.name)
140
+
141
+
142
class TagDeleteCommand(TagCommand):
    """Delete a tag from a Hub repo, asking for confirmation unless `-y` was given."""

    def run(self):
        tag = ANSI.bold(self.args.tag)
        repo = ANSI.bold(self.repo_id)
        print(f"You are about to delete tag {tag} on {self.repo_type} {repo}")

        if not self.args.yes:
            # An empty answer defaults to "yes".
            answer = input("Proceed? [Y/n] ").lower()
            if answer not in ("", "y", "yes"):
                print("Abort")
                exit()
        try:
            self.api.delete_tag(repo_id=self.repo_id, tag=self.args.tag, repo_type=self.repo_type)
        except RepositoryNotFoundError:
            print(f"{self.repo_type.capitalize()} {repo} not found.")
            exit(1)
        except RevisionNotFoundError:
            # delete_tag raises RevisionNotFoundError when the tag ref is missing.
            print(f"Tag {tag} not found on {repo}")
            exit(1)
        print(f"Tag {tag} deleted on {repo}")
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/audio_to_audio.cpython-310.pyc ADDED
Binary file (936 Bytes). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/base.cpython-310.pyc ADDED
Binary file (5.13 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_classification.cpython-310.pyc ADDED
Binary file (1.37 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_to_image.cpython-310.pyc ADDED
Binary file (1.58 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/object_detection.cpython-310.pyc ADDED
Binary file (1.6 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/table_question_answering.cpython-310.pyc ADDED
Binary file (1.82 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/token_classification.cpython-310.pyc ADDED
Binary file (1.59 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_image_classification.cpython-310.pyc ADDED
Binary file (1.3 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/zero_shot_object_detection.cpython-310.pyc ADDED
Binary file (1.63 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/chat_completion.py ADDED
@@ -0,0 +1,301 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Inference code generated from the JSON schema spec in @huggingface/tasks.
2
+ #
3
+ # See:
4
+ # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
+ # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
+ from typing import Any, List, Literal, Optional, Union
7
+
8
+ from .base import BaseInferenceType, dataclass_with_extra
9
+
10
+
11
+ @dataclass_with_extra
12
+ class ChatCompletionInputURL(BaseInferenceType):
13
+ url: str
14
+
15
+
16
+ ChatCompletionInputMessageChunkType = Literal["text", "image_url"]
17
+
18
+
19
+ @dataclass_with_extra
20
+ class ChatCompletionInputMessageChunk(BaseInferenceType):
21
+ type: "ChatCompletionInputMessageChunkType"
22
+ image_url: Optional[ChatCompletionInputURL] = None
23
+ text: Optional[str] = None
24
+
25
+
26
+ @dataclass_with_extra
27
+ class ChatCompletionInputMessage(BaseInferenceType):
28
+ content: Union[List[ChatCompletionInputMessageChunk], str]
29
+ role: str
30
+ name: Optional[str] = None
31
+
32
+
33
+ ChatCompletionInputGrammarTypeType = Literal["json", "regex"]
34
+
35
+
36
+ @dataclass_with_extra
37
+ class ChatCompletionInputGrammarType(BaseInferenceType):
38
+ type: "ChatCompletionInputGrammarTypeType"
39
+ value: Any
40
+ """A string that represents a [JSON Schema](https://json-schema.org/).
41
+ JSON Schema is a declarative language that allows to annotate JSON documents
42
+ with types and descriptions.
43
+ """
44
+
45
+
46
+ @dataclass_with_extra
47
+ class ChatCompletionInputStreamOptions(BaseInferenceType):
48
+ include_usage: bool
49
+ """If set, an additional chunk will be streamed before the data: [DONE] message. The usage
50
+ field on this chunk shows the token usage statistics for the entire request, and the
51
+ choices field will always be an empty array. All other chunks will also include a usage
52
+ field, but with a null value.
53
+ """
54
+
55
+
56
+ @dataclass_with_extra
57
+ class ChatCompletionInputFunctionName(BaseInferenceType):
58
+ name: str
59
+
60
+
61
+ @dataclass_with_extra
62
+ class ChatCompletionInputToolChoiceClass(BaseInferenceType):
63
+ function: ChatCompletionInputFunctionName
64
+
65
+
66
+ ChatCompletionInputToolChoiceEnum = Literal["auto", "none", "required"]
67
+
68
+
69
+ @dataclass_with_extra
70
+ class ChatCompletionInputFunctionDefinition(BaseInferenceType):
71
+ arguments: Any
72
+ name: str
73
+ description: Optional[str] = None
74
+
75
+
76
+ @dataclass_with_extra
77
+ class ChatCompletionInputTool(BaseInferenceType):
78
+ function: ChatCompletionInputFunctionDefinition
79
+ type: str
80
+
81
+
82
+ @dataclass_with_extra
83
+ class ChatCompletionInput(BaseInferenceType):
84
+ """Chat Completion Input.
85
+ Auto-generated from TGI specs.
86
+ For more details, check out
87
+ https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-tgi-import.ts.
88
+ """
89
+
90
+ messages: List[ChatCompletionInputMessage]
91
+ """A list of messages comprising the conversation so far."""
92
+ frequency_penalty: Optional[float] = None
93
+ """Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing
94
+ frequency in the text so far,
95
+ decreasing the model's likelihood to repeat the same line verbatim.
96
+ """
97
+ logit_bias: Optional[List[float]] = None
98
+ """UNUSED
99
+ Modify the likelihood of specified tokens appearing in the completion. Accepts a JSON
100
+ object that maps tokens
101
+ (specified by their token ID in the tokenizer) to an associated bias value from -100 to
102
+ 100. Mathematically,
103
+ the bias is added to the logits generated by the model prior to sampling. The exact
104
+ effect will vary per model,
105
+ but values between -1 and 1 should decrease or increase likelihood of selection; values
106
+ like -100 or 100 should
107
+ result in a ban or exclusive selection of the relevant token.
108
+ """
109
+ logprobs: Optional[bool] = None
110
+ """Whether to return log probabilities of the output tokens or not. If true, returns the log
111
+ probabilities of each
112
+ output token returned in the content of message.
113
+ """
114
+ max_tokens: Optional[int] = None
115
+ """The maximum number of tokens that can be generated in the chat completion."""
116
+ model: Optional[str] = None
117
+ """[UNUSED] ID of the model to use. See the model endpoint compatibility table for details
118
+ on which models work with the Chat API.
119
+ """
120
+ n: Optional[int] = None
121
+ """UNUSED
122
+ How many chat completion choices to generate for each input message. Note that you will
123
+ be charged based on the
124
+ number of generated tokens across all of the choices. Keep n as 1 to minimize costs.
125
+ """
126
+ presence_penalty: Optional[float] = None
127
+ """Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they
128
+ appear in the text so far,
129
+ increasing the model's likelihood to talk about new topics
130
+ """
131
+ response_format: Optional[ChatCompletionInputGrammarType] = None
132
+ seed: Optional[int] = None
133
+ stop: Optional[List[str]] = None
134
+ """Up to 4 sequences where the API will stop generating further tokens."""
135
+ stream: Optional[bool] = None
136
+ stream_options: Optional[ChatCompletionInputStreamOptions] = None
137
+ temperature: Optional[float] = None
138
+ """What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the
139
+ output more random, while
140
+ lower values like 0.2 will make it more focused and deterministic.
141
+ We generally recommend altering this or `top_p` but not both.
142
+ """
143
+ tool_choice: Optional[Union[ChatCompletionInputToolChoiceClass, "ChatCompletionInputToolChoiceEnum"]] = None
144
+ tool_prompt: Optional[str] = None
145
+ """A prompt to be appended before the tools"""
146
+ tools: Optional[List[ChatCompletionInputTool]] = None
147
+ """A list of tools the model may call. Currently, only functions are supported as a tool.
148
+ Use this to provide a list of
149
+ functions the model may generate JSON inputs for.
150
+ """
151
+ top_logprobs: Optional[int] = None
152
+ """An integer between 0 and 5 specifying the number of most likely tokens to return at each
153
+ token position, each with
154
+ an associated log probability. logprobs must be set to true if this parameter is used.
155
+ """
156
+ top_p: Optional[float] = None
157
+ """An alternative to sampling with temperature, called nucleus sampling, where the model
158
+ considers the results of the
159
+ tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10%
160
+ probability mass are considered.
161
+ """
162
+
163
+
164
+ @dataclass_with_extra
165
+ class ChatCompletionOutputTopLogprob(BaseInferenceType):
166
+ logprob: float
167
+ token: str
168
+
169
+
170
+ @dataclass_with_extra
171
+ class ChatCompletionOutputLogprob(BaseInferenceType):
172
+ logprob: float
173
+ token: str
174
+ top_logprobs: List[ChatCompletionOutputTopLogprob]
175
+
176
+
177
+ @dataclass_with_extra
178
+ class ChatCompletionOutputLogprobs(BaseInferenceType):
179
+ content: List[ChatCompletionOutputLogprob]
180
+
181
+
182
+ @dataclass_with_extra
183
+ class ChatCompletionOutputFunctionDefinition(BaseInferenceType):
184
+ arguments: Any
185
+ name: str
186
+ description: Optional[str] = None
187
+
188
+
189
+ @dataclass_with_extra
190
+ class ChatCompletionOutputToolCall(BaseInferenceType):
191
+ function: ChatCompletionOutputFunctionDefinition
192
+ id: str
193
+ type: str
194
+
195
+
196
+ @dataclass_with_extra
197
+ class ChatCompletionOutputMessage(BaseInferenceType):
198
+ role: str
199
+ content: Optional[str] = None
200
+ tool_calls: Optional[List[ChatCompletionOutputToolCall]] = None
201
+
202
+
203
+ @dataclass_with_extra
204
+ class ChatCompletionOutputComplete(BaseInferenceType):
205
+ finish_reason: str
206
+ index: int
207
+ message: ChatCompletionOutputMessage
208
+ logprobs: Optional[ChatCompletionOutputLogprobs] = None
209
+
210
+
211
+ @dataclass_with_extra
212
+ class ChatCompletionOutputUsage(BaseInferenceType):
213
+ completion_tokens: int
214
+ prompt_tokens: int
215
+ total_tokens: int
216
+
217
+
218
+ @dataclass_with_extra
219
+ class ChatCompletionOutput(BaseInferenceType):
220
+ """Chat Completion Output.
221
+ Auto-generated from TGI specs.
222
+ For more details, check out
223
+ https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-tgi-import.ts.
224
+ """
225
+
226
+ choices: List[ChatCompletionOutputComplete]
227
+ created: int
228
+ id: str
229
+ model: str
230
+ system_fingerprint: str
231
+ usage: ChatCompletionOutputUsage
232
+
233
+
234
+ @dataclass_with_extra
235
+ class ChatCompletionStreamOutputFunction(BaseInferenceType):
236
+ arguments: str
237
+ name: Optional[str] = None
238
+
239
+
240
+ @dataclass_with_extra
241
+ class ChatCompletionStreamOutputDeltaToolCall(BaseInferenceType):
242
+ function: ChatCompletionStreamOutputFunction
243
+ id: str
244
+ index: int
245
+ type: str
246
+
247
+
248
+ @dataclass_with_extra
249
+ class ChatCompletionStreamOutputDelta(BaseInferenceType):
250
+ role: str
251
+ content: Optional[str] = None
252
+ tool_calls: Optional[ChatCompletionStreamOutputDeltaToolCall] = None
253
+
254
+
255
+ @dataclass_with_extra
256
+ class ChatCompletionStreamOutputTopLogprob(BaseInferenceType):
257
+ logprob: float
258
+ token: str
259
+
260
+
261
+ @dataclass_with_extra
262
+ class ChatCompletionStreamOutputLogprob(BaseInferenceType):
263
+ logprob: float
264
+ token: str
265
+ top_logprobs: List[ChatCompletionStreamOutputTopLogprob]
266
+
267
+
268
+ @dataclass_with_extra
269
+ class ChatCompletionStreamOutputLogprobs(BaseInferenceType):
270
+ content: List[ChatCompletionStreamOutputLogprob]
271
+
272
+
273
+ @dataclass_with_extra
274
+ class ChatCompletionStreamOutputChoice(BaseInferenceType):
275
+ delta: ChatCompletionStreamOutputDelta
276
+ index: int
277
+ finish_reason: Optional[str] = None
278
+ logprobs: Optional[ChatCompletionStreamOutputLogprobs] = None
279
+
280
+
281
+ @dataclass_with_extra
282
+ class ChatCompletionStreamOutputUsage(BaseInferenceType):
283
+ completion_tokens: int
284
+ prompt_tokens: int
285
+ total_tokens: int
286
+
287
+
288
+ @dataclass_with_extra
289
+ class ChatCompletionStreamOutput(BaseInferenceType):
290
+ """Chat Completion Stream Output.
291
+ Auto-generated from TGI specs.
292
+ For more details, check out
293
+ https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-tgi-import.ts.
294
+ """
295
+
296
+ choices: List[ChatCompletionStreamOutputChoice]
297
+ created: int
298
+ id: str
299
+ model: str
300
+ system_fingerprint: str
301
+ usage: Optional[ChatCompletionStreamOutputUsage] = None
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/depth_estimation.py ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Inference code generated from the JSON schema spec in @huggingface/tasks.
2
+ #
3
+ # See:
4
+ # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
+ # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
+ from typing import Any, Dict, Optional
7
+
8
+ from .base import BaseInferenceType, dataclass_with_extra
9
+
10
+
11
# NOTE(review): auto-generated from the JSON schema spec — change the codegen, not this file.
@dataclass_with_extra
class DepthEstimationInput(BaseInferenceType):
    """Inputs for Depth Estimation inference"""

    inputs: Any
    """The input image data"""
    parameters: Optional[Dict[str, Any]] = None
    """Additional inference parameters for Depth Estimation"""
19
+
20
+
21
# NOTE(review): auto-generated from the JSON schema spec — change the codegen, not this file.
@dataclass_with_extra
class DepthEstimationOutput(BaseInferenceType):
    """Outputs of inference for the Depth Estimation task"""

    depth: Any
    """The predicted depth as an image"""
    predicted_depth: Any
    """The predicted depth as a tensor"""
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/image_to_text.py ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Inference code generated from the JSON schema spec in @huggingface/tasks.
2
+ #
3
+ # See:
4
+ # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
+ # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
+ from typing import Any, Literal, Optional, Union
7
+
8
+ from .base import BaseInferenceType, dataclass_with_extra
9
+
10
+
11
+ ImageToTextEarlyStoppingEnum = Literal["never"]
12
+
13
+
14
+ @dataclass_with_extra
15
+ class ImageToTextGenerationParameters(BaseInferenceType):
16
+ """Parametrization of the text generation process"""
17
+
18
+ do_sample: Optional[bool] = None
19
+ """Whether to use sampling instead of greedy decoding when generating new tokens."""
20
+ early_stopping: Optional[Union[bool, "ImageToTextEarlyStoppingEnum"]] = None
21
+ """Controls the stopping condition for beam-based methods."""
22
+ epsilon_cutoff: Optional[float] = None
23
+ """If set to float strictly between 0 and 1, only tokens with a conditional probability
24
+ greater than epsilon_cutoff will be sampled. In the paper, suggested values range from
25
+ 3e-4 to 9e-4, depending on the size of the model. See [Truncation Sampling as Language
26
+ Model Desmoothing](https://hf.co/papers/2210.15191) for more details.
27
+ """
28
+ eta_cutoff: Optional[float] = None
29
+ """Eta sampling is a hybrid of locally typical sampling and epsilon sampling. If set to
30
+ float strictly between 0 and 1, a token is only considered if it is greater than either
31
+ eta_cutoff or sqrt(eta_cutoff) * exp(-entropy(softmax(next_token_logits))). The latter
32
+ term is intuitively the expected next token probability, scaled by sqrt(eta_cutoff). In
33
+ the paper, suggested values range from 3e-4 to 2e-3, depending on the size of the model.
34
+ See [Truncation Sampling as Language Model Desmoothing](https://hf.co/papers/2210.15191)
35
+ for more details.
36
+ """
37
+ max_length: Optional[int] = None
38
+ """The maximum length (in tokens) of the generated text, including the input."""
39
+ max_new_tokens: Optional[int] = None
40
+ """The maximum number of tokens to generate. Takes precedence over max_length."""
41
+ min_length: Optional[int] = None
42
+ """The minimum length (in tokens) of the generated text, including the input."""
43
+ min_new_tokens: Optional[int] = None
44
+ """The minimum number of tokens to generate. Takes precedence over min_length."""
45
+ num_beam_groups: Optional[int] = None
46
+ """Number of groups to divide num_beams into in order to ensure diversity among different
47
+ groups of beams. See [this paper](https://hf.co/papers/1610.02424) for more details.
48
+ """
49
+ num_beams: Optional[int] = None
50
+ """Number of beams to use for beam search."""
51
+ penalty_alpha: Optional[float] = None
52
+ """The value balances the model confidence and the degeneration penalty in contrastive
53
+ search decoding.
54
+ """
55
+ temperature: Optional[float] = None
56
+ """The value used to modulate the next token probabilities."""
57
+ top_k: Optional[int] = None
58
+ """The number of highest probability vocabulary tokens to keep for top-k-filtering."""
59
+ top_p: Optional[float] = None
60
+ """If set to float < 1, only the smallest set of most probable tokens with probabilities
61
+ that add up to top_p or higher are kept for generation.
62
+ """
63
+ typical_p: Optional[float] = None
64
+ """Local typicality measures how similar the conditional probability of predicting a target
65
+ token next is to the expected conditional probability of predicting a random token next,
66
+ given the partial text already generated. If set to float < 1, the smallest set of the
67
+ most locally typical tokens with probabilities that add up to typical_p or higher are
68
+ kept for generation. See [this paper](https://hf.co/papers/2202.00666) for more details.
69
+ """
70
+ use_cache: Optional[bool] = None
71
+ """Whether the model should use the past last key/values attentions to speed up decoding"""
72
+
73
+
74
+ @dataclass_with_extra
75
+ class ImageToTextParameters(BaseInferenceType):
76
+ """Additional inference parameters for Image To Text"""
77
+
78
+ max_new_tokens: Optional[int] = None
79
+ """The amount of maximum tokens to generate."""
80
+ # Will be deprecated in the future when the renaming to `generation_parameters` is implemented in transformers
81
+ generate_kwargs: Optional[ImageToTextGenerationParameters] = None
82
+ """Parametrization of the text generation process"""
83
+
84
+
85
+ @dataclass_with_extra
86
+ class ImageToTextInput(BaseInferenceType):
87
+ """Inputs for Image To Text inference"""
88
+
89
+ inputs: Any
90
+ """The input image data"""
91
+ parameters: Optional[ImageToTextParameters] = None
92
+ """Additional inference parameters for Image To Text"""
93
+
94
+
95
+ @dataclass_with_extra
96
+ class ImageToTextOutput(BaseInferenceType):
97
+ """Outputs of inference for the Image To Text task"""
98
+
99
+ generated_text: Any
100
+ image_to_text_output_generated_text: Optional[str] = None
101
+ """The generated text."""
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/sentence_similarity.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Inference code generated from the JSON schema spec in @huggingface/tasks.
2
+ #
3
+ # See:
4
+ # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
+ # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
+ from typing import Any, Dict, List, Optional
7
+
8
+ from .base import BaseInferenceType, dataclass_with_extra
9
+
10
+
11
# NOTE(review): auto-generated from the JSON schema spec — change the codegen, not this file.
@dataclass_with_extra
class SentenceSimilarityInputData(BaseInferenceType):
    sentences: List[str]
    """A list of strings which will be compared against the source_sentence."""
    source_sentence: str
    """The string that you wish to compare the other strings with. This can be a phrase,
    sentence, or longer passage, depending on the model being used.
    """
20
+
21
# NOTE(review): auto-generated from the JSON schema spec — change the codegen, not this file.
@dataclass_with_extra
class SentenceSimilarityInput(BaseInferenceType):
    """Inputs for Sentence similarity inference"""

    inputs: SentenceSimilarityInputData
    parameters: Optional[Dict[str, Any]] = None
    """Additional inference parameters for Sentence Similarity"""
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/token_classification.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Inference code generated from the JSON schema spec in @huggingface/tasks.
2
+ #
3
+ # See:
4
+ # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
+ # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
+ from typing import List, Literal, Optional
7
+
8
+ from .base import BaseInferenceType, dataclass_with_extra
9
+
10
+
11
+ TokenClassificationAggregationStrategy = Literal["none", "simple", "first", "average", "max"]
12
+
13
+
14
+ @dataclass_with_extra
15
+ class TokenClassificationParameters(BaseInferenceType):
16
+ """Additional inference parameters for Token Classification"""
17
+
18
+ aggregation_strategy: Optional["TokenClassificationAggregationStrategy"] = None
19
+ """The strategy used to fuse tokens based on model predictions"""
20
+ ignore_labels: Optional[List[str]] = None
21
+ """A list of labels to ignore"""
22
+ stride: Optional[int] = None
23
+ """The number of overlapping tokens between chunks when splitting the input text."""
24
+
25
+
26
+ @dataclass_with_extra
27
+ class TokenClassificationInput(BaseInferenceType):
28
+ """Inputs for Token Classification inference"""
29
+
30
+ inputs: str
31
+ """The input text data"""
32
+ parameters: Optional[TokenClassificationParameters] = None
33
+ """Additional inference parameters for Token Classification"""
34
+
35
+
36
+ @dataclass_with_extra
37
+ class TokenClassificationOutputElement(BaseInferenceType):
38
+ """Outputs of inference for the Token Classification task"""
39
+
40
+ end: int
41
+ """The character position in the input where this group ends."""
42
+ score: float
43
+ """The associated score / probability"""
44
+ start: int
45
+ """The character position in the input where this group begins."""
46
+ word: str
47
+ """The corresponding text"""
48
+ entity: Optional[str] = None
49
+ """The predicted label for a single token"""
50
+ entity_group: Optional[str] = None
51
+ """The predicted label for a group of one or more tokens"""
parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (3.69 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_auth.cpython-310.pyc ADDED
Binary file (6.51 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_cache_manager.cpython-310.pyc ADDED
Binary file (29.2 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_chunk_utils.cpython-310.pyc ADDED
Binary file (1.71 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_datetime.cpython-310.pyc ADDED
Binary file (1.82 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_deprecation.cpython-310.pyc ADDED
Binary file (4.94 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_experimental.cpython-310.pyc ADDED
Binary file (1.92 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_fixes.cpython-310.pyc ADDED
Binary file (3.7 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_http.cpython-310.pyc ADDED
Binary file (18.9 kB). View file
 
parrot/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_lfs.cpython-310.pyc ADDED
Binary file (3.75 kB). View file