Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- .gitattributes +2 -0
- parrot/lib/libitm.so +3 -0
- parrot/lib/python3.10/site-packages/gitdb/db/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/gitdb/db/__pycache__/base.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/gitdb/db/__pycache__/ref.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/gitdb/test/__init__.py +4 -0
- parrot/lib/python3.10/site-packages/gitdb/test/test_example.py +43 -0
- parrot/lib/python3.10/site-packages/gitdb/test/test_pack.py +249 -0
- parrot/lib/python3.10/site-packages/gitdb/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/gitdb/utils/__pycache__/encoding.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/gitdb/utils/encoding.py +18 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/__init__.py +1431 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/_commit_api.py +758 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/_commit_scheduler.py +353 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/_inference_endpoints.py +407 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/_local_folder.py +432 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/_login.py +520 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/_snapshot_download.py +307 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/_space_api.py +160 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/_tensorboard_logger.py +194 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/_upload_large_folder.py +622 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/_webhooks_payload.py +137 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/community.py +355 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/constants.py +235 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/fastai_utils.py +425 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/file_download.py +1625 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/hf_api.py +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/hf_file_system.py +1140 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/__init__.py +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_client.py +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_common.py +422 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/audio_to_audio.py +30 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/text_generation.py +168 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__init__.py +125 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/_common.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/black_forest_labs.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/fal_ai.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/fireworks_ai.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/hf_inference.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/hyperbolic.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/nebius.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/novita.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/replicate.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/sambanova.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/together.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/_common.py +239 -0
- parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/black_forest_labs.py +66 -0
.gitattributes
CHANGED
@@ -150,3 +150,5 @@ parrot/lib/python3.10/site-packages/pyarrow/_dataset_parquet.cpython-310-x86_64-
 parrot/lib/python3.10/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
 parrot/lib/python3.10/site-packages/cv2/qt/fonts/DejaVuSansCondensed.ttf filter=lfs diff=lfs merge=lfs -text
 parrot/lib/libssl.so filter=lfs diff=lfs merge=lfs -text
+parrot/lib/libitm.so filter=lfs diff=lfs merge=lfs -text
+parrot/lib/python3.10/site-packages/wandb/sdk/__pycache__/wandb_run.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
parrot/lib/libitm.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:70a7a1a8352b39da726e026874f1854096cdd1c60e80ea5cf97a4e38055ea7c1
+size 1018904
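
The three added lines are a Git LFS pointer: the 1,018,904-byte shared library itself lives in LFS storage, while the repository tracks only the spec version, a SHA-256 object id, and the size. As an aside (not part of this commit), a minimal Python sketch that parses such a pointer file into a dict:

```python
# Minimal sketch: parse a Git LFS pointer file (version / oid / size lines)
# into a dict. Illustrative only; not part of this commit.
def parse_lfs_pointer(path):
    fields = {}
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    # oid is given as "sha256:<hex digest>"
    algo, _, digest = fields["oid"].partition(":")
    return {"version": fields["version"], "algo": algo,
            "digest": digest, "size": int(fields["size"])}

# e.g. parse_lfs_pointer("parrot/lib/libitm.so")
# -> {"version": "https://git-lfs.github.com/spec/v1", "algo": "sha256",
#     "digest": "70a7a1a8...", "size": 1018904}
```

At checkout, git resolves the pointer back to the real blob via the `filter=lfs` attribute added to `.gitattributes` above.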
parrot/lib/python3.10/site-packages/gitdb/db/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (313 Bytes)
parrot/lib/python3.10/site-packages/gitdb/db/__pycache__/base.cpython-310.pyc
ADDED
Binary file (9.68 kB)
parrot/lib/python3.10/site-packages/gitdb/db/__pycache__/ref.cpython-310.pyc
ADDED
Binary file (2.37 kB)
parrot/lib/python3.10/site-packages/gitdb/test/__init__.py
ADDED
@@ -0,0 +1,4 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: https://opensource.org/license/bsd-3-clause/
parrot/lib/python3.10/site-packages/gitdb/test/test_example.py
ADDED
@@ -0,0 +1,43 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: https://opensource.org/license/bsd-3-clause/
+"""Module with examples from the tutorial section of the docs"""
+import os
+from gitdb.test.lib import TestBase
+from gitdb import IStream
+from gitdb.db import LooseObjectDB
+
+from io import BytesIO
+
+
+class TestExamples(TestBase):
+
+    def test_base(self):
+        ldb = LooseObjectDB(os.path.join(self.gitrepopath, 'objects'))
+
+        for sha1 in ldb.sha_iter():
+            oinfo = ldb.info(sha1)
+            ostream = ldb.stream(sha1)
+            assert oinfo[:3] == ostream[:3]
+
+            assert len(ostream.read()) == ostream.size
+            assert ldb.has_object(oinfo.binsha)
+        # END for each sha in database
+        # assure we close all files
+        try:
+            del(ostream)
+            del(oinfo)
+        except UnboundLocalError:
+            pass
+        # END ignore exception if there are no loose objects
+
+        data = b"my data"
+        istream = IStream("blob", len(data), BytesIO(data))
+
+        # the object does not yet have a sha
+        assert istream.binsha is None
+        ldb.store(istream)
+        # now the sha is set
+        assert len(istream.binsha) == 20
+        assert ldb.has_object(istream.binsha)
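
The test above doubles as a tour of gitdb's object-database API: `LooseObjectDB` reads loose objects by binary sha, and `IStream` wraps new data for storage, gaining a 20-byte `binsha` once stored. A minimal round-trip sketch against the same API (illustrative only; the repository path is a hypothetical placeholder):

```python
# Minimal round-trip sketch using the gitdb API exercised by the test above.
# The objects-directory path is a hypothetical example.
from io import BytesIO
from gitdb import IStream
from gitdb.db import LooseObjectDB

ldb = LooseObjectDB("/path/to/repo/.git/objects")  # hypothetical path

data = b"my data"
istream = IStream("blob", len(data), BytesIO(data))
ldb.store(istream)                 # fills in istream.binsha (20 raw bytes)

assert ldb.has_object(istream.binsha)
assert ldb.stream(istream.binsha).read() == data   # read the blob back
```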
parrot/lib/python3.10/site-packages/gitdb/test/test_pack.py
ADDED
@@ -0,0 +1,249 @@
+# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
+#
+# This module is part of GitDB and is released under
+# the New BSD License: https://opensource.org/license/bsd-3-clause/
+"""Test everything about packs reading and writing"""
+from gitdb.test.lib import (
+    TestBase,
+    with_rw_directory,
+    fixture_path
+)
+
+from gitdb.stream import DeltaApplyReader
+
+from gitdb.pack import (
+    PackEntity,
+    PackIndexFile,
+    PackFile
+)
+
+from gitdb.base import (
+    OInfo,
+    OStream,
+)
+
+from gitdb.fun import delta_types
+from gitdb.exc import UnsupportedOperation
+from gitdb.util import to_bin_sha
+
+import pytest
+
+import os
+import tempfile
+
+
+#{ Utilities
+def bin_sha_from_filename(filename):
+    return to_bin_sha(os.path.splitext(os.path.basename(filename))[0][5:])
+#} END utilities
+
+
+class TestPack(TestBase):
+
+    packindexfile_v1 = (fixture_path('packs/pack-c0438c19fb16422b6bbcce24387b3264416d485b.idx'), 1, 67)
+    packindexfile_v2 = (fixture_path('packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.idx'), 2, 30)
+    packindexfile_v2_3_ascii = (fixture_path('packs/pack-a2bf8e71d8c18879e499335762dd95119d93d9f1.idx'), 2, 42)
+    packfile_v2_1 = (fixture_path('packs/pack-c0438c19fb16422b6bbcce24387b3264416d485b.pack'), 2, packindexfile_v1[2])
+    packfile_v2_2 = (fixture_path('packs/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.pack'), 2, packindexfile_v2[2])
+    packfile_v2_3_ascii = (
+        fixture_path('packs/pack-a2bf8e71d8c18879e499335762dd95119d93d9f1.pack'), 2, packindexfile_v2_3_ascii[2])
+
+    def _assert_index_file(self, index, version, size):
+        assert index.packfile_checksum() != index.indexfile_checksum()
+        assert len(index.packfile_checksum()) == 20
+        assert len(index.indexfile_checksum()) == 20
+        assert index.version() == version
+        assert index.size() == size
+        assert len(index.offsets()) == size
+
+        # get all data of all objects
+        for oidx in range(index.size()):
+            sha = index.sha(oidx)
+            assert oidx == index.sha_to_index(sha)
+
+            entry = index.entry(oidx)
+            assert len(entry) == 3
+
+            assert entry[0] == index.offset(oidx)
+            assert entry[1] == sha
+            assert entry[2] == index.crc(oidx)
+
+            # verify partial sha
+            for l in (4, 8, 11, 17, 20):
+                assert index.partial_sha_to_index(sha[:l], l * 2) == oidx
+
+        # END for each object index in indexfile
+        self.assertRaises(ValueError, index.partial_sha_to_index, "\0", 2)
+
+    def _assert_pack_file(self, pack, version, size):
+        assert pack.version() == 2
+        assert pack.size() == size
+        assert len(pack.checksum()) == 20
+
+        num_obj = 0
+        for obj in pack.stream_iter():
+            num_obj += 1
+            info = pack.info(obj.pack_offset)
+            stream = pack.stream(obj.pack_offset)
+
+            assert info.pack_offset == stream.pack_offset
+            assert info.type_id == stream.type_id
+            assert hasattr(stream, 'read')
+
+            # it should be possible to read from both streams
+            assert obj.read() == stream.read()
+
+            streams = pack.collect_streams(obj.pack_offset)
+            assert streams
+
+            # read the stream
+            try:
+                dstream = DeltaApplyReader.new(streams)
+            except ValueError:
+                # ignore these, old git versions use only ref deltas,
+                # which we haven't resolved ( as we are without an index )
+                # Also ignore non-delta streams
+                continue
+            # END get deltastream
+
+            # read all
+            data = dstream.read()
+            assert len(data) == dstream.size
+
+            # test seek
+            dstream.seek(0)
+            assert dstream.read() == data
+
+            # read chunks
+            # NOTE: the current implementation is safe, it basically transfers
+            # all calls to the underlying memory map
+
+        # END for each object
+        assert num_obj == size
+
+    def test_pack_index(self):
+        # check version 1 and 2
+        for indexfile, version, size in (self.packindexfile_v1, self.packindexfile_v2):
+            index = PackIndexFile(indexfile)
+            self._assert_index_file(index, version, size)
+        # END run tests
+
+    def test_pack(self):
+        # there is this special version 3, but apparently its like 2 ...
+        for packfile, version, size in (self.packfile_v2_3_ascii, self.packfile_v2_1, self.packfile_v2_2):
+            pack = PackFile(packfile)
+            self._assert_pack_file(pack, version, size)
+        # END for each pack to test
+
+    @with_rw_directory
+    def test_pack_entity(self, rw_dir):
+        pack_objs = list()
+        for packinfo, indexinfo in ((self.packfile_v2_1, self.packindexfile_v1),
+                                    (self.packfile_v2_2, self.packindexfile_v2),
+                                    (self.packfile_v2_3_ascii, self.packindexfile_v2_3_ascii)):
+            packfile, version, size = packinfo
+            indexfile, version, size = indexinfo
+            entity = PackEntity(packfile)
+            assert entity.pack().path() == packfile
+            assert entity.index().path() == indexfile
+            pack_objs.extend(entity.stream_iter())
+
+            count = 0
+            for info, stream in zip(entity.info_iter(), entity.stream_iter()):
+                count += 1
+                assert info.binsha == stream.binsha
+                assert len(info.binsha) == 20
+                assert info.type_id == stream.type_id
+                assert info.size == stream.size
+
+                # we return fully resolved items, which is implied by the sha centric access
+                assert not info.type_id in delta_types
+
+                # try all calls
+                assert len(entity.collect_streams(info.binsha))
+                oinfo = entity.info(info.binsha)
+                assert isinstance(oinfo, OInfo)
+                assert oinfo.binsha is not None
+                ostream = entity.stream(info.binsha)
+                assert isinstance(ostream, OStream)
+                assert ostream.binsha is not None
+
+                # verify the stream
+                try:
+                    assert entity.is_valid_stream(info.binsha, use_crc=True)
+                except UnsupportedOperation:
+                    pass
+                # END ignore version issues
+                assert entity.is_valid_stream(info.binsha, use_crc=False)
+            # END for each info, stream tuple
+            assert count == size
+
+        # END for each entity
+
+        # pack writing - write all packs into one
+        # index path can be None
+        pack_path1 = tempfile.mktemp('', "pack1", rw_dir)
+        pack_path2 = tempfile.mktemp('', "pack2", rw_dir)
+        index_path = tempfile.mktemp('', 'index', rw_dir)
+        iteration = 0
+
+        def rewind_streams():
+            for obj in pack_objs:
+                obj.stream.seek(0)
+        # END utility
+        for ppath, ipath, num_obj in zip((pack_path1, pack_path2),
+                                         (index_path, None),
+                                         (len(pack_objs), None)):
+            iwrite = None
+            if ipath:
+                ifile = open(ipath, 'wb')
+                iwrite = ifile.write
+            # END handle ip
+
+            # make sure we rewind the streams ... we work on the same objects over and over again
+            if iteration > 0:
+                rewind_streams()
+            # END rewind streams
+            iteration += 1
+
+            with open(ppath, 'wb') as pfile:
+                pack_sha, index_sha = PackEntity.write_pack(pack_objs, pfile.write, iwrite, object_count=num_obj)
+            assert os.path.getsize(ppath) > 100
+
+            # verify pack
+            pf = PackFile(ppath)
+            assert pf.size() == len(pack_objs)
+            assert pf.version() == PackFile.pack_version_default
+            assert pf.checksum() == pack_sha
+            pf.close()
+
+            # verify index
+            if ipath is not None:
+                ifile.close()
+                assert os.path.getsize(ipath) > 100
+                idx = PackIndexFile(ipath)
+                assert idx.version() == PackIndexFile.index_version_default
+                assert idx.packfile_checksum() == pack_sha
+                assert idx.indexfile_checksum() == index_sha
+                assert idx.size() == len(pack_objs)
+                idx.close()
+            # END verify files exist
+        # END for each packpath, indexpath pair
+
+        # verify the packs thoroughly
+        rewind_streams()
+        entity = PackEntity.create(pack_objs, rw_dir)
+        count = 0
+        for info in entity.info_iter():
+            count += 1
+            for use_crc in range(2):
+                assert entity.is_valid_stream(info.binsha, use_crc)
+            # END for each crc mode
+        # END for each info
+        assert count == len(pack_objs)
+        entity.close()
+
+    def test_pack_64(self):
+        # TODO: hex-edit a pack helping us to verify that we can handle 64 byte offsets
+        # of course without really needing such a huge pack
+        pytest.skip('not implemented')
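
`test_pack_entity` above also shows the high-level entry point: a `PackEntity` pairs a `.pack` file with its `.idx` and hands out fully delta-resolved objects from `info_iter()` and `stream_iter()`. A minimal read-only sketch over a single pack (illustrative; the directory prefix is a hypothetical placeholder):

```python
# Minimal sketch: enumerate the fully resolved objects in one pack via the
# PackEntity API exercised by test_pack_entity. The directory is hypothetical;
# PackEntity locates the matching .idx next to the .pack itself.
from gitdb.pack import PackEntity

entity = PackEntity("/path/to/packs/pack-c0438c19fb16422b6bbcce24387b3264416d485b.pack")
for info in entity.info_iter():
    # info.binsha is the 20-byte object id; type_id and size are already
    # delta-resolved, as the test asserts (type_id not in delta_types)
    print(info.binsha.hex(), info.type_id, info.size)
entity.close()
```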
parrot/lib/python3.10/site-packages/gitdb/utils/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (163 Bytes)
parrot/lib/python3.10/site-packages/gitdb/utils/__pycache__/encoding.cpython-310.pyc
ADDED
Binary file (541 Bytes)
parrot/lib/python3.10/site-packages/gitdb/utils/encoding.py
ADDED
@@ -0,0 +1,18 @@
+def force_bytes(data, encoding="utf-8"):
+    if isinstance(data, bytes):
+        return data
+
+    if isinstance(data, str):
+        return data.encode(encoding)
+
+    return data
+
+
+def force_text(data, encoding="utf-8"):
+    if isinstance(data, str):
+        return data
+
+    if isinstance(data, bytes):
+        return data.decode(encoding)
+
+    return str(data, encoding)
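
These two helpers normalize values at the str/bytes boundary: each returns its argument unchanged when it is already the requested type and converts it otherwise. A quick usage sketch (illustrative):

```python
from gitdb.utils.encoding import force_bytes, force_text

assert force_bytes("héllo") == "héllo".encode("utf-8")   # str -> bytes
assert force_bytes(b"raw") == b"raw"                     # bytes pass through
assert force_text(b"h\xc3\xa9llo") == "héllo"            # bytes -> str
assert force_text("already text") == "already text"      # str pass through
```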
parrot/lib/python3.10/site-packages/huggingface_hub/__init__.py
ADDED
@@ -0,0 +1,1431 @@
+# Copyright 2020 The HuggingFace Team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# ***********
+# `huggingface_hub` init has 2 modes:
+# - Normal usage:
+#       If imported to use it, all modules and functions are lazy-loaded. This means
+#       they exist at top level in module but are imported only the first time they are
+#       used. This way, `from huggingface_hub import something` will import `something`
+#       quickly without the hassle of importing all the features from `huggingface_hub`.
+# - Static check:
+#       If statically analyzed, all modules and functions are loaded normally. This way
+#       static typing check works properly as well as autocomplete in text editors and
+#       IDEs.
+#
+# The static model imports are done inside the `if TYPE_CHECKING:` statement at
+# the bottom of this file. Since module/functions imports are duplicated, it is
+# mandatory to make sure to add them twice when adding one. This is checked in the
+# `make quality` command.
+#
+# To update the static imports, please run the following command and commit the changes.
+# ```
+# # Use script
+# python utils/check_static_imports.py --update-file
+#
+# # Or run style on codebase
+# make style
+# ```
+#
+# ***********
+# Lazy loader vendored from https://github.com/scientific-python/lazy_loader
+import importlib
+import os
+import sys
+from typing import TYPE_CHECKING
+
+
+__version__ = "0.29.1"
+
+# Alphabetical order of definitions is ensured in tests
+# WARNING: any comment added in this dictionary definition will be lost when
+# re-generating the file !
+_SUBMOD_ATTRS = {
+    "_commit_scheduler": [
+        "CommitScheduler",
+    ],
+    "_inference_endpoints": [
+        "InferenceEndpoint",
+        "InferenceEndpointError",
+        "InferenceEndpointStatus",
+        "InferenceEndpointTimeoutError",
+        "InferenceEndpointType",
+    ],
+    "_login": [
+        "auth_list",
+        "auth_switch",
+        "interpreter_login",
+        "login",
+        "logout",
+        "notebook_login",
+    ],
+    "_snapshot_download": [
+        "snapshot_download",
+    ],
+    "_space_api": [
+        "SpaceHardware",
+        "SpaceRuntime",
+        "SpaceStage",
+        "SpaceStorage",
+        "SpaceVariable",
+    ],
+    "_tensorboard_logger": [
+        "HFSummaryWriter",
+    ],
+    "_webhooks_payload": [
+        "WebhookPayload",
+        "WebhookPayloadComment",
+        "WebhookPayloadDiscussion",
+        "WebhookPayloadDiscussionChanges",
+        "WebhookPayloadEvent",
+        "WebhookPayloadMovedTo",
+        "WebhookPayloadRepo",
+        "WebhookPayloadUrl",
+        "WebhookPayloadWebhook",
+    ],
+    "_webhooks_server": [
+        "WebhooksServer",
+        "webhook_endpoint",
+    ],
+    "community": [
+        "Discussion",
+        "DiscussionComment",
+        "DiscussionCommit",
+        "DiscussionEvent",
+        "DiscussionStatusChange",
+        "DiscussionTitleChange",
+        "DiscussionWithDetails",
+    ],
+    "constants": [
+        "CONFIG_NAME",
+        "FLAX_WEIGHTS_NAME",
+        "HUGGINGFACE_CO_URL_HOME",
+        "HUGGINGFACE_CO_URL_TEMPLATE",
+        "PYTORCH_WEIGHTS_NAME",
+        "REPO_TYPE_DATASET",
+        "REPO_TYPE_MODEL",
+        "REPO_TYPE_SPACE",
+        "TF2_WEIGHTS_NAME",
+        "TF_WEIGHTS_NAME",
+    ],
+    "fastai_utils": [
+        "_save_pretrained_fastai",
+        "from_pretrained_fastai",
+        "push_to_hub_fastai",
+    ],
+    "file_download": [
+        "HfFileMetadata",
+        "_CACHED_NO_EXIST",
+        "get_hf_file_metadata",
+        "hf_hub_download",
+        "hf_hub_url",
+        "try_to_load_from_cache",
+    ],
+    "hf_api": [
+        "Collection",
+        "CollectionItem",
+        "CommitInfo",
+        "CommitOperation",
+        "CommitOperationAdd",
+        "CommitOperationCopy",
+        "CommitOperationDelete",
+        "DatasetInfo",
+        "GitCommitInfo",
+        "GitRefInfo",
+        "GitRefs",
+        "HfApi",
+        "ModelInfo",
+        "RepoUrl",
+        "SpaceInfo",
+        "User",
+        "UserLikes",
+        "WebhookInfo",
+        "WebhookWatchedItem",
+        "accept_access_request",
+        "add_collection_item",
+        "add_space_secret",
+        "add_space_variable",
+        "auth_check",
+        "cancel_access_request",
+        "change_discussion_status",
+        "comment_discussion",
+        "create_branch",
+        "create_collection",
+        "create_commit",
+        "create_discussion",
+        "create_inference_endpoint",
+        "create_pull_request",
+        "create_repo",
+        "create_tag",
+        "create_webhook",
+        "dataset_info",
+        "delete_branch",
+        "delete_collection",
+        "delete_collection_item",
+        "delete_file",
+        "delete_folder",
+        "delete_inference_endpoint",
+        "delete_repo",
+        "delete_space_secret",
+        "delete_space_storage",
+        "delete_space_variable",
+        "delete_tag",
+        "delete_webhook",
+        "disable_webhook",
+        "duplicate_space",
+        "edit_discussion_comment",
+        "enable_webhook",
+        "file_exists",
+        "get_collection",
+        "get_dataset_tags",
+        "get_discussion_details",
+        "get_full_repo_name",
+        "get_inference_endpoint",
+        "get_model_tags",
+        "get_paths_info",
+        "get_repo_discussions",
+        "get_safetensors_metadata",
+        "get_space_runtime",
+        "get_space_variables",
+        "get_token_permission",
+        "get_user_overview",
+        "get_webhook",
+        "grant_access",
+        "list_accepted_access_requests",
+        "list_collections",
+        "list_datasets",
+        "list_inference_endpoints",
+        "list_liked_repos",
+        "list_models",
+        "list_organization_members",
+        "list_papers",
+        "list_pending_access_requests",
+        "list_rejected_access_requests",
+        "list_repo_commits",
+        "list_repo_files",
+        "list_repo_likers",
+        "list_repo_refs",
+        "list_repo_tree",
+        "list_spaces",
+        "list_user_followers",
+        "list_user_following",
+        "list_webhooks",
+        "merge_pull_request",
+        "model_info",
+        "move_repo",
+        "paper_info",
+        "parse_safetensors_file_metadata",
+        "pause_inference_endpoint",
+        "pause_space",
+        "preupload_lfs_files",
+        "reject_access_request",
+        "rename_discussion",
+        "repo_exists",
+        "repo_info",
+        "repo_type_and_id_from_hf_id",
+        "request_space_hardware",
+        "request_space_storage",
+        "restart_space",
+        "resume_inference_endpoint",
+        "revision_exists",
+        "run_as_future",
+        "scale_to_zero_inference_endpoint",
+        "set_space_sleep_time",
+        "space_info",
+        "super_squash_history",
+        "unlike",
+        "update_collection_item",
+        "update_collection_metadata",
+        "update_inference_endpoint",
+        "update_repo_settings",
+        "update_repo_visibility",
+        "update_webhook",
+        "upload_file",
+        "upload_folder",
+        "upload_large_folder",
+        "whoami",
+    ],
+    "hf_file_system": [
+        "HfFileSystem",
+        "HfFileSystemFile",
+        "HfFileSystemResolvedPath",
+        "HfFileSystemStreamFile",
+    ],
+    "hub_mixin": [
+        "ModelHubMixin",
+        "PyTorchModelHubMixin",
+    ],
+    "inference._client": [
+        "InferenceClient",
+        "InferenceTimeoutError",
+    ],
+    "inference._generated._async_client": [
+        "AsyncInferenceClient",
+    ],
+    "inference._generated.types": [
+        "AudioClassificationInput",
+        "AudioClassificationOutputElement",
+        "AudioClassificationOutputTransform",
+        "AudioClassificationParameters",
+        "AudioToAudioInput",
+        "AudioToAudioOutputElement",
+        "AutomaticSpeechRecognitionEarlyStoppingEnum",
+        "AutomaticSpeechRecognitionGenerationParameters",
+        "AutomaticSpeechRecognitionInput",
+        "AutomaticSpeechRecognitionOutput",
+        "AutomaticSpeechRecognitionOutputChunk",
+        "AutomaticSpeechRecognitionParameters",
+        "ChatCompletionInput",
+        "ChatCompletionInputFunctionDefinition",
+        "ChatCompletionInputFunctionName",
+        "ChatCompletionInputGrammarType",
+        "ChatCompletionInputGrammarTypeType",
+        "ChatCompletionInputMessage",
+        "ChatCompletionInputMessageChunk",
+        "ChatCompletionInputMessageChunkType",
+        "ChatCompletionInputStreamOptions",
+        "ChatCompletionInputTool",
+        "ChatCompletionInputToolChoiceClass",
+        "ChatCompletionInputToolChoiceEnum",
+        "ChatCompletionInputURL",
+        "ChatCompletionOutput",
+        "ChatCompletionOutputComplete",
+        "ChatCompletionOutputFunctionDefinition",
+        "ChatCompletionOutputLogprob",
+        "ChatCompletionOutputLogprobs",
+        "ChatCompletionOutputMessage",
+        "ChatCompletionOutputToolCall",
+        "ChatCompletionOutputTopLogprob",
+        "ChatCompletionOutputUsage",
+        "ChatCompletionStreamOutput",
+        "ChatCompletionStreamOutputChoice",
+        "ChatCompletionStreamOutputDelta",
+        "ChatCompletionStreamOutputDeltaToolCall",
+        "ChatCompletionStreamOutputFunction",
+        "ChatCompletionStreamOutputLogprob",
+        "ChatCompletionStreamOutputLogprobs",
+        "ChatCompletionStreamOutputTopLogprob",
+        "ChatCompletionStreamOutputUsage",
+        "DepthEstimationInput",
+        "DepthEstimationOutput",
+        "DocumentQuestionAnsweringInput",
+        "DocumentQuestionAnsweringInputData",
+        "DocumentQuestionAnsweringOutputElement",
+        "DocumentQuestionAnsweringParameters",
+        "FeatureExtractionInput",
+        "FeatureExtractionInputTruncationDirection",
+        "FillMaskInput",
+        "FillMaskOutputElement",
+        "FillMaskParameters",
+        "ImageClassificationInput",
+        "ImageClassificationOutputElement",
+        "ImageClassificationOutputTransform",
+        "ImageClassificationParameters",
+        "ImageSegmentationInput",
+        "ImageSegmentationOutputElement",
+        "ImageSegmentationParameters",
+        "ImageSegmentationSubtask",
+        "ImageToImageInput",
+        "ImageToImageOutput",
+        "ImageToImageParameters",
+        "ImageToImageTargetSize",
+        "ImageToTextEarlyStoppingEnum",
+        "ImageToTextGenerationParameters",
+        "ImageToTextInput",
+        "ImageToTextOutput",
+        "ImageToTextParameters",
+        "ObjectDetectionBoundingBox",
+        "ObjectDetectionInput",
+        "ObjectDetectionOutputElement",
+        "ObjectDetectionParameters",
+        "Padding",
+        "QuestionAnsweringInput",
+        "QuestionAnsweringInputData",
+        "QuestionAnsweringOutputElement",
+        "QuestionAnsweringParameters",
+        "SentenceSimilarityInput",
+        "SentenceSimilarityInputData",
+        "SummarizationInput",
+        "SummarizationOutput",
+        "SummarizationParameters",
+        "SummarizationTruncationStrategy",
+        "TableQuestionAnsweringInput",
+        "TableQuestionAnsweringInputData",
+        "TableQuestionAnsweringOutputElement",
+        "TableQuestionAnsweringParameters",
+        "Text2TextGenerationInput",
+        "Text2TextGenerationOutput",
+        "Text2TextGenerationParameters",
+        "Text2TextGenerationTruncationStrategy",
+        "TextClassificationInput",
+        "TextClassificationOutputElement",
+        "TextClassificationOutputTransform",
+        "TextClassificationParameters",
+        "TextGenerationInput",
+        "TextGenerationInputGenerateParameters",
+        "TextGenerationInputGrammarType",
+        "TextGenerationOutput",
+        "TextGenerationOutputBestOfSequence",
+        "TextGenerationOutputDetails",
+        "TextGenerationOutputFinishReason",
+        "TextGenerationOutputPrefillToken",
+        "TextGenerationOutputToken",
+        "TextGenerationStreamOutput",
+        "TextGenerationStreamOutputStreamDetails",
+        "TextGenerationStreamOutputToken",
+        "TextToAudioEarlyStoppingEnum",
+        "TextToAudioGenerationParameters",
+        "TextToAudioInput",
+        "TextToAudioOutput",
+        "TextToAudioParameters",
+        "TextToImageInput",
+        "TextToImageOutput",
+        "TextToImageParameters",
+        "TextToSpeechEarlyStoppingEnum",
+        "TextToSpeechGenerationParameters",
+        "TextToSpeechInput",
+        "TextToSpeechOutput",
+        "TextToSpeechParameters",
+        "TextToVideoInput",
+        "TextToVideoOutput",
+        "TextToVideoParameters",
+        "TokenClassificationAggregationStrategy",
+        "TokenClassificationInput",
+        "TokenClassificationOutputElement",
+        "TokenClassificationParameters",
+        "TranslationInput",
+        "TranslationOutput",
+        "TranslationParameters",
+        "TranslationTruncationStrategy",
+        "TypeEnum",
+        "VideoClassificationInput",
+        "VideoClassificationOutputElement",
+        "VideoClassificationOutputTransform",
+        "VideoClassificationParameters",
+        "VisualQuestionAnsweringInput",
+        "VisualQuestionAnsweringInputData",
+        "VisualQuestionAnsweringOutputElement",
+        "VisualQuestionAnsweringParameters",
+        "ZeroShotClassificationInput",
+        "ZeroShotClassificationOutputElement",
+        "ZeroShotClassificationParameters",
+        "ZeroShotImageClassificationInput",
+        "ZeroShotImageClassificationOutputElement",
+        "ZeroShotImageClassificationParameters",
+        "ZeroShotObjectDetectionBoundingBox",
+        "ZeroShotObjectDetectionInput",
+        "ZeroShotObjectDetectionOutputElement",
+        "ZeroShotObjectDetectionParameters",
+    ],
+    "inference_api": [
+        "InferenceApi",
+    ],
+    "keras_mixin": [
+        "KerasModelHubMixin",
+        "from_pretrained_keras",
+        "push_to_hub_keras",
+        "save_pretrained_keras",
+    ],
+    "repocard": [
+        "DatasetCard",
+        "ModelCard",
+        "RepoCard",
+        "SpaceCard",
+        "metadata_eval_result",
+        "metadata_load",
+        "metadata_save",
+        "metadata_update",
+    ],
+    "repocard_data": [
+        "CardData",
+        "DatasetCardData",
+        "EvalResult",
+        "ModelCardData",
+        "SpaceCardData",
+    ],
+    "repository": [
+        "Repository",
+    ],
+    "serialization": [
+        "StateDictSplit",
+        "get_tf_storage_size",
+        "get_torch_storage_id",
+        "get_torch_storage_size",
+        "load_state_dict_from_file",
+        "load_torch_model",
+        "save_torch_model",
+        "save_torch_state_dict",
+        "split_state_dict_into_shards_factory",
+        "split_tf_state_dict_into_shards",
+        "split_torch_state_dict_into_shards",
+    ],
+    "serialization._dduf": [
+        "DDUFEntry",
+        "export_entries_as_dduf",
+        "export_folder_as_dduf",
+        "read_dduf_file",
+    ],
+    "utils": [
+        "CacheNotFound",
+        "CachedFileInfo",
+        "CachedRepoInfo",
+        "CachedRevisionInfo",
+        "CorruptedCacheException",
+        "DeleteCacheStrategy",
+        "HFCacheInfo",
+        "HfFolder",
+        "cached_assets_path",
+        "configure_http_backend",
+        "dump_environment_info",
+        "get_session",
+        "get_token",
+        "logging",
+        "scan_cache_dir",
+    ],
+}
+
+# WARNING: __all__ is generated automatically, Any manual edit will be lost when re-generating this file !
+#
+# To update the static imports, please run the following command and commit the changes.
+# ```
+# # Use script
+# python utils/check_all_variable.py --update
+#
+# # Or run style on codebase
+# make style
+# ```
+
+__all__ = [
+    "AsyncInferenceClient",
+    "AudioClassificationInput",
+    "AudioClassificationOutputElement",
+    "AudioClassificationOutputTransform",
+    "AudioClassificationParameters",
+    "AudioToAudioInput",
+    "AudioToAudioOutputElement",
+    "AutomaticSpeechRecognitionEarlyStoppingEnum",
+    "AutomaticSpeechRecognitionGenerationParameters",
+    "AutomaticSpeechRecognitionInput",
+    "AutomaticSpeechRecognitionOutput",
+    "AutomaticSpeechRecognitionOutputChunk",
+    "AutomaticSpeechRecognitionParameters",
+    "CONFIG_NAME",
+    "CacheNotFound",
+    "CachedFileInfo",
+    "CachedRepoInfo",
+    "CachedRevisionInfo",
+    "CardData",
+    "ChatCompletionInput",
+    "ChatCompletionInputFunctionDefinition",
+    "ChatCompletionInputFunctionName",
+    "ChatCompletionInputGrammarType",
+    "ChatCompletionInputGrammarTypeType",
+    "ChatCompletionInputMessage",
+    "ChatCompletionInputMessageChunk",
+    "ChatCompletionInputMessageChunkType",
+    "ChatCompletionInputStreamOptions",
+    "ChatCompletionInputTool",
+    "ChatCompletionInputToolChoiceClass",
+    "ChatCompletionInputToolChoiceEnum",
+    "ChatCompletionInputURL",
+    "ChatCompletionOutput",
+    "ChatCompletionOutputComplete",
+    "ChatCompletionOutputFunctionDefinition",
+    "ChatCompletionOutputLogprob",
+    "ChatCompletionOutputLogprobs",
+    "ChatCompletionOutputMessage",
+    "ChatCompletionOutputToolCall",
+    "ChatCompletionOutputTopLogprob",
+    "ChatCompletionOutputUsage",
+    "ChatCompletionStreamOutput",
+    "ChatCompletionStreamOutputChoice",
+    "ChatCompletionStreamOutputDelta",
+    "ChatCompletionStreamOutputDeltaToolCall",
+    "ChatCompletionStreamOutputFunction",
+    "ChatCompletionStreamOutputLogprob",
+    "ChatCompletionStreamOutputLogprobs",
+    "ChatCompletionStreamOutputTopLogprob",
+    "ChatCompletionStreamOutputUsage",
+    "Collection",
+    "CollectionItem",
+    "CommitInfo",
+    "CommitOperation",
+    "CommitOperationAdd",
+    "CommitOperationCopy",
+    "CommitOperationDelete",
+    "CommitScheduler",
+    "CorruptedCacheException",
+    "DDUFEntry",
+    "DatasetCard",
+    "DatasetCardData",
+    "DatasetInfo",
+    "DeleteCacheStrategy",
+    "DepthEstimationInput",
+    "DepthEstimationOutput",
+    "Discussion",
+    "DiscussionComment",
+    "DiscussionCommit",
+    "DiscussionEvent",
+    "DiscussionStatusChange",
+    "DiscussionTitleChange",
+    "DiscussionWithDetails",
+    "DocumentQuestionAnsweringInput",
+    "DocumentQuestionAnsweringInputData",
+    "DocumentQuestionAnsweringOutputElement",
+    "DocumentQuestionAnsweringParameters",
+    "EvalResult",
+    "FLAX_WEIGHTS_NAME",
+    "FeatureExtractionInput",
+    "FeatureExtractionInputTruncationDirection",
+    "FillMaskInput",
+    "FillMaskOutputElement",
+    "FillMaskParameters",
+    "GitCommitInfo",
+    "GitRefInfo",
+    "GitRefs",
+    "HFCacheInfo",
+    "HFSummaryWriter",
+    "HUGGINGFACE_CO_URL_HOME",
+    "HUGGINGFACE_CO_URL_TEMPLATE",
+    "HfApi",
+    "HfFileMetadata",
+    "HfFileSystem",
+    "HfFileSystemFile",
+    "HfFileSystemResolvedPath",
+    "HfFileSystemStreamFile",
+    "HfFolder",
+    "ImageClassificationInput",
+    "ImageClassificationOutputElement",
+    "ImageClassificationOutputTransform",
+    "ImageClassificationParameters",
+    "ImageSegmentationInput",
+    "ImageSegmentationOutputElement",
+    "ImageSegmentationParameters",
+    "ImageSegmentationSubtask",
+    "ImageToImageInput",
+    "ImageToImageOutput",
+    "ImageToImageParameters",
+    "ImageToImageTargetSize",
+    "ImageToTextEarlyStoppingEnum",
+    "ImageToTextGenerationParameters",
+    "ImageToTextInput",
+    "ImageToTextOutput",
+    "ImageToTextParameters",
+    "InferenceApi",
+    "InferenceClient",
+    "InferenceEndpoint",
+    "InferenceEndpointError",
+    "InferenceEndpointStatus",
+    "InferenceEndpointTimeoutError",
+    "InferenceEndpointType",
+    "InferenceTimeoutError",
+    "KerasModelHubMixin",
+    "ModelCard",
+    "ModelCardData",
+    "ModelHubMixin",
+    "ModelInfo",
+    "ObjectDetectionBoundingBox",
+    "ObjectDetectionInput",
+    "ObjectDetectionOutputElement",
+    "ObjectDetectionParameters",
+    "PYTORCH_WEIGHTS_NAME",
+    "Padding",
+    "PyTorchModelHubMixin",
+    "QuestionAnsweringInput",
+    "QuestionAnsweringInputData",
+    "QuestionAnsweringOutputElement",
+    "QuestionAnsweringParameters",
+    "REPO_TYPE_DATASET",
+    "REPO_TYPE_MODEL",
+    "REPO_TYPE_SPACE",
+    "RepoCard",
+    "RepoUrl",
+    "Repository",
+    "SentenceSimilarityInput",
+    "SentenceSimilarityInputData",
+    "SpaceCard",
+    "SpaceCardData",
+    "SpaceHardware",
+    "SpaceInfo",
+    "SpaceRuntime",
+    "SpaceStage",
+    "SpaceStorage",
+    "SpaceVariable",
+    "StateDictSplit",
+    "SummarizationInput",
+    "SummarizationOutput",
+    "SummarizationParameters",
+    "SummarizationTruncationStrategy",
+    "TF2_WEIGHTS_NAME",
+    "TF_WEIGHTS_NAME",
+    "TableQuestionAnsweringInput",
+    "TableQuestionAnsweringInputData",
+    "TableQuestionAnsweringOutputElement",
+    "TableQuestionAnsweringParameters",
+    "Text2TextGenerationInput",
+    "Text2TextGenerationOutput",
+    "Text2TextGenerationParameters",
+    "Text2TextGenerationTruncationStrategy",
+    "TextClassificationInput",
+    "TextClassificationOutputElement",
+    "TextClassificationOutputTransform",
+    "TextClassificationParameters",
+    "TextGenerationInput",
+    "TextGenerationInputGenerateParameters",
+    "TextGenerationInputGrammarType",
+    "TextGenerationOutput",
+    "TextGenerationOutputBestOfSequence",
+    "TextGenerationOutputDetails",
+    "TextGenerationOutputFinishReason",
+    "TextGenerationOutputPrefillToken",
+    "TextGenerationOutputToken",
+    "TextGenerationStreamOutput",
+    "TextGenerationStreamOutputStreamDetails",
+    "TextGenerationStreamOutputToken",
+    "TextToAudioEarlyStoppingEnum",
+    "TextToAudioGenerationParameters",
+    "TextToAudioInput",
+    "TextToAudioOutput",
+    "TextToAudioParameters",
+    "TextToImageInput",
+    "TextToImageOutput",
+    "TextToImageParameters",
+    "TextToSpeechEarlyStoppingEnum",
+    "TextToSpeechGenerationParameters",
+    "TextToSpeechInput",
+    "TextToSpeechOutput",
+    "TextToSpeechParameters",
+    "TextToVideoInput",
+    "TextToVideoOutput",
+    "TextToVideoParameters",
+    "TokenClassificationAggregationStrategy",
+    "TokenClassificationInput",
+    "TokenClassificationOutputElement",
+    "TokenClassificationParameters",
+    "TranslationInput",
+    "TranslationOutput",
+    "TranslationParameters",
+    "TranslationTruncationStrategy",
+    "TypeEnum",
+    "User",
+    "UserLikes",
+    "VideoClassificationInput",
+    "VideoClassificationOutputElement",
+    "VideoClassificationOutputTransform",
+    "VideoClassificationParameters",
+    "VisualQuestionAnsweringInput",
+    "VisualQuestionAnsweringInputData",
+    "VisualQuestionAnsweringOutputElement",
+    "VisualQuestionAnsweringParameters",
+    "WebhookInfo",
+    "WebhookPayload",
+    "WebhookPayloadComment",
+    "WebhookPayloadDiscussion",
+    "WebhookPayloadDiscussionChanges",
+    "WebhookPayloadEvent",
+    "WebhookPayloadMovedTo",
+    "WebhookPayloadRepo",
+    "WebhookPayloadUrl",
+    "WebhookPayloadWebhook",
+    "WebhookWatchedItem",
+    "WebhooksServer",
+    "ZeroShotClassificationInput",
+    "ZeroShotClassificationOutputElement",
+    "ZeroShotClassificationParameters",
+    "ZeroShotImageClassificationInput",
+    "ZeroShotImageClassificationOutputElement",
+    "ZeroShotImageClassificationParameters",
+    "ZeroShotObjectDetectionBoundingBox",
+    "ZeroShotObjectDetectionInput",
+    "ZeroShotObjectDetectionOutputElement",
+    "ZeroShotObjectDetectionParameters",
+    "_CACHED_NO_EXIST",
+    "_save_pretrained_fastai",
+    "accept_access_request",
+    "add_collection_item",
+    "add_space_secret",
+    "add_space_variable",
+    "auth_check",
+    "auth_list",
+    "auth_switch",
+    "cached_assets_path",
+    "cancel_access_request",
+    "change_discussion_status",
+    "comment_discussion",
+    "configure_http_backend",
+    "create_branch",
+    "create_collection",
+    "create_commit",
+    "create_discussion",
+    "create_inference_endpoint",
+    "create_pull_request",
+    "create_repo",
+    "create_tag",
+    "create_webhook",
+    "dataset_info",
+    "delete_branch",
+    "delete_collection",
+    "delete_collection_item",
+    "delete_file",
+    "delete_folder",
+    "delete_inference_endpoint",
+    "delete_repo",
+    "delete_space_secret",
+    "delete_space_storage",
+    "delete_space_variable",
+    "delete_tag",
+    "delete_webhook",
+    "disable_webhook",
+    "dump_environment_info",
+    "duplicate_space",
+    "edit_discussion_comment",
+    "enable_webhook",
+    "export_entries_as_dduf",
+    "export_folder_as_dduf",
+    "file_exists",
+    "from_pretrained_fastai",
+    "from_pretrained_keras",
+    "get_collection",
+    "get_dataset_tags",
+    "get_discussion_details",
+    "get_full_repo_name",
+    "get_hf_file_metadata",
+    "get_inference_endpoint",
+    "get_model_tags",
+    "get_paths_info",
+    "get_repo_discussions",
+    "get_safetensors_metadata",
+    "get_session",
+    "get_space_runtime",
+    "get_space_variables",
+    "get_tf_storage_size",
+    "get_token",
+    "get_token_permission",
+    "get_torch_storage_id",
+    "get_torch_storage_size",
+    "get_user_overview",
+    "get_webhook",
+    "grant_access",
+    "hf_hub_download",
+    "hf_hub_url",
+    "interpreter_login",
+    "list_accepted_access_requests",
+    "list_collections",
+    "list_datasets",
+    "list_inference_endpoints",
+    "list_liked_repos",
+    "list_models",
+    "list_organization_members",
+    "list_papers",
+    "list_pending_access_requests",
|
| 832 |
+
"list_rejected_access_requests",
|
| 833 |
+
"list_repo_commits",
|
| 834 |
+
"list_repo_files",
|
| 835 |
+
"list_repo_likers",
|
| 836 |
+
"list_repo_refs",
|
| 837 |
+
"list_repo_tree",
|
| 838 |
+
"list_spaces",
|
| 839 |
+
"list_user_followers",
|
| 840 |
+
"list_user_following",
|
| 841 |
+
"list_webhooks",
|
| 842 |
+
"load_state_dict_from_file",
|
| 843 |
+
"load_torch_model",
|
| 844 |
+
"logging",
|
| 845 |
+
"login",
|
| 846 |
+
"logout",
|
| 847 |
+
"merge_pull_request",
|
| 848 |
+
"metadata_eval_result",
|
| 849 |
+
"metadata_load",
|
| 850 |
+
"metadata_save",
|
| 851 |
+
"metadata_update",
|
| 852 |
+
"model_info",
|
| 853 |
+
"move_repo",
|
| 854 |
+
"notebook_login",
|
| 855 |
+
"paper_info",
|
| 856 |
+
"parse_safetensors_file_metadata",
|
| 857 |
+
"pause_inference_endpoint",
|
| 858 |
+
"pause_space",
|
| 859 |
+
"preupload_lfs_files",
|
| 860 |
+
"push_to_hub_fastai",
|
| 861 |
+
"push_to_hub_keras",
|
| 862 |
+
"read_dduf_file",
|
| 863 |
+
"reject_access_request",
|
| 864 |
+
"rename_discussion",
|
| 865 |
+
"repo_exists",
|
| 866 |
+
"repo_info",
|
| 867 |
+
"repo_type_and_id_from_hf_id",
|
| 868 |
+
"request_space_hardware",
|
| 869 |
+
"request_space_storage",
|
| 870 |
+
"restart_space",
|
| 871 |
+
"resume_inference_endpoint",
|
| 872 |
+
"revision_exists",
|
| 873 |
+
"run_as_future",
|
| 874 |
+
"save_pretrained_keras",
|
| 875 |
+
"save_torch_model",
|
| 876 |
+
"save_torch_state_dict",
|
| 877 |
+
"scale_to_zero_inference_endpoint",
|
| 878 |
+
"scan_cache_dir",
|
| 879 |
+
"set_space_sleep_time",
|
| 880 |
+
"snapshot_download",
|
| 881 |
+
"space_info",
|
| 882 |
+
"split_state_dict_into_shards_factory",
|
| 883 |
+
"split_tf_state_dict_into_shards",
|
| 884 |
+
"split_torch_state_dict_into_shards",
|
| 885 |
+
"super_squash_history",
|
| 886 |
+
"try_to_load_from_cache",
|
| 887 |
+
"unlike",
|
| 888 |
+
"update_collection_item",
|
| 889 |
+
"update_collection_metadata",
|
| 890 |
+
"update_inference_endpoint",
|
| 891 |
+
"update_repo_settings",
|
| 892 |
+
"update_repo_visibility",
|
| 893 |
+
"update_webhook",
|
| 894 |
+
"upload_file",
|
| 895 |
+
"upload_folder",
|
| 896 |
+
"upload_large_folder",
|
| 897 |
+
"webhook_endpoint",
|
| 898 |
+
"whoami",
|
| 899 |
+
]
|
| 900 |
+
|
| 901 |
+
|
def _attach(package_name, submodules=None, submod_attrs=None):
    """Attach lazily loaded submodules, functions, or other attributes.

    Typically, modules import submodules and attributes as follows:

    ```py
    import mysubmodule
    import anothersubmodule

    from .foo import someattr
    ```

    The idea is to replace a package's `__getattr__` and `__dir__` such that all imports
    work exactly the way they would with normal imports, except that the import occurs
    upon first use.

    The typical way to call this function, replacing the above imports, is:

    ```python
    __getattr__, __dir__ = lazy.attach(
        __name__,
        ['mysubmodule', 'anothersubmodule'],
        {'foo': ['someattr']}
    )
    ```
    This functionality requires Python 3.7 or higher.

    Args:
        package_name (`str`):
            Typically use `__name__`.
        submodules (`set`):
            List of submodules to attach.
        submod_attrs (`dict`):
            Dictionary of submodule -> list of attributes / functions.
            These attributes are imported as they are used.

    Returns:
        __getattr__, __dir__
    """
    if submod_attrs is None:
        submod_attrs = {}

    if submodules is None:
        submodules = set()
    else:
        submodules = set(submodules)

    attr_to_modules = {attr: mod for mod, attrs in submod_attrs.items() for attr in attrs}

    def __getattr__(name):
        if name in submodules:
            try:
                return importlib.import_module(f"{package_name}.{name}")
            except Exception as e:
                print(f"Error importing {package_name}.{name}: {e}")
                raise
        elif name in attr_to_modules:
            submod_path = f"{package_name}.{attr_to_modules[name]}"
            try:
                submod = importlib.import_module(submod_path)
            except Exception as e:
                print(f"Error importing {submod_path}: {e}")
                raise
            attr = getattr(submod, name)

            # If the attribute lives in a file (module) with the same
            # name as the attribute, ensure that the attribute and *not*
            # the module is accessible on the package.
            if name == attr_to_modules[name]:
                pkg = sys.modules[package_name]
                pkg.__dict__[name] = attr

            return attr
        else:
            raise AttributeError(f"No {package_name} attribute {name}")

    def __dir__():
        return __all__

    return __getattr__, __dir__


__getattr__, __dir__ = _attach(__name__, submodules=[], submod_attrs=_SUBMOD_ATTRS)

if os.environ.get("EAGER_IMPORT", ""):
    for attr in __all__:
        __getattr__(attr)

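# Editorial note (sketch, not part of the generated file): with the lazy `_attach`
# hook installed above, a top-level attribute is imported from its submodule only on
# first access, e.g.:
#
#     import huggingface_hub
#     huggingface_hub.hf_hub_download  # resolved from `.file_download` on first use
#
# Setting the `EAGER_IMPORT` environment variable resolves every name in `__all__` at
# import time instead, surfacing broken imports early at the cost of a slower import.
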
# WARNING: any content below this statement is generated automatically. Any manual edit
# will be lost when re-generating this file !
#
# To update the static imports, please run the following command and commit the changes.
# ```
# # Use script
# python utils/check_static_imports.py --update
#
# # Or run style on codebase
# make style
# ```
if TYPE_CHECKING:  # pragma: no cover
    from ._commit_scheduler import CommitScheduler  # noqa: F401
    from ._inference_endpoints import (
        InferenceEndpoint,  # noqa: F401
        InferenceEndpointError,  # noqa: F401
        InferenceEndpointStatus,  # noqa: F401
        InferenceEndpointTimeoutError,  # noqa: F401
        InferenceEndpointType,  # noqa: F401
    )
    from ._login import (
        auth_list,  # noqa: F401
        auth_switch,  # noqa: F401
        interpreter_login,  # noqa: F401
        login,  # noqa: F401
        logout,  # noqa: F401
        notebook_login,  # noqa: F401
    )
    from ._snapshot_download import snapshot_download  # noqa: F401
    from ._space_api import (
        SpaceHardware,  # noqa: F401
        SpaceRuntime,  # noqa: F401
        SpaceStage,  # noqa: F401
        SpaceStorage,  # noqa: F401
        SpaceVariable,  # noqa: F401
    )
    from ._tensorboard_logger import HFSummaryWriter  # noqa: F401
    from ._webhooks_payload import (
        WebhookPayload,  # noqa: F401
        WebhookPayloadComment,  # noqa: F401
        WebhookPayloadDiscussion,  # noqa: F401
        WebhookPayloadDiscussionChanges,  # noqa: F401
        WebhookPayloadEvent,  # noqa: F401
        WebhookPayloadMovedTo,  # noqa: F401
        WebhookPayloadRepo,  # noqa: F401
        WebhookPayloadUrl,  # noqa: F401
        WebhookPayloadWebhook,  # noqa: F401
    )
    from ._webhooks_server import (
        WebhooksServer,  # noqa: F401
        webhook_endpoint,  # noqa: F401
    )
    from .community import (
        Discussion,  # noqa: F401
        DiscussionComment,  # noqa: F401
        DiscussionCommit,  # noqa: F401
        DiscussionEvent,  # noqa: F401
        DiscussionStatusChange,  # noqa: F401
        DiscussionTitleChange,  # noqa: F401
        DiscussionWithDetails,  # noqa: F401
    )
    from .constants import (
        CONFIG_NAME,  # noqa: F401
        FLAX_WEIGHTS_NAME,  # noqa: F401
        HUGGINGFACE_CO_URL_HOME,  # noqa: F401
        HUGGINGFACE_CO_URL_TEMPLATE,  # noqa: F401
        PYTORCH_WEIGHTS_NAME,  # noqa: F401
        REPO_TYPE_DATASET,  # noqa: F401
        REPO_TYPE_MODEL,  # noqa: F401
        REPO_TYPE_SPACE,  # noqa: F401
        TF2_WEIGHTS_NAME,  # noqa: F401
        TF_WEIGHTS_NAME,  # noqa: F401
    )
    from .fastai_utils import (
        _save_pretrained_fastai,  # noqa: F401
        from_pretrained_fastai,  # noqa: F401
        push_to_hub_fastai,  # noqa: F401
    )
    from .file_download import (
        _CACHED_NO_EXIST,  # noqa: F401
        HfFileMetadata,  # noqa: F401
        get_hf_file_metadata,  # noqa: F401
        hf_hub_download,  # noqa: F401
        hf_hub_url,  # noqa: F401
        try_to_load_from_cache,  # noqa: F401
    )
    from .hf_api import (
        Collection,  # noqa: F401
        CollectionItem,  # noqa: F401
        CommitInfo,  # noqa: F401
        CommitOperation,  # noqa: F401
        CommitOperationAdd,  # noqa: F401
        CommitOperationCopy,  # noqa: F401
        CommitOperationDelete,  # noqa: F401
        DatasetInfo,  # noqa: F401
        GitCommitInfo,  # noqa: F401
        GitRefInfo,  # noqa: F401
        GitRefs,  # noqa: F401
        HfApi,  # noqa: F401
        ModelInfo,  # noqa: F401
        RepoUrl,  # noqa: F401
        SpaceInfo,  # noqa: F401
        User,  # noqa: F401
        UserLikes,  # noqa: F401
        WebhookInfo,  # noqa: F401
        WebhookWatchedItem,  # noqa: F401
        accept_access_request,  # noqa: F401
        add_collection_item,  # noqa: F401
        add_space_secret,  # noqa: F401
        add_space_variable,  # noqa: F401
        auth_check,  # noqa: F401
        cancel_access_request,  # noqa: F401
        change_discussion_status,  # noqa: F401
        comment_discussion,  # noqa: F401
        create_branch,  # noqa: F401
        create_collection,  # noqa: F401
        create_commit,  # noqa: F401
        create_discussion,  # noqa: F401
        create_inference_endpoint,  # noqa: F401
        create_pull_request,  # noqa: F401
        create_repo,  # noqa: F401
        create_tag,  # noqa: F401
        create_webhook,  # noqa: F401
        dataset_info,  # noqa: F401
        delete_branch,  # noqa: F401
        delete_collection,  # noqa: F401
        delete_collection_item,  # noqa: F401
        delete_file,  # noqa: F401
        delete_folder,  # noqa: F401
        delete_inference_endpoint,  # noqa: F401
        delete_repo,  # noqa: F401
        delete_space_secret,  # noqa: F401
        delete_space_storage,  # noqa: F401
        delete_space_variable,  # noqa: F401
        delete_tag,  # noqa: F401
        delete_webhook,  # noqa: F401
        disable_webhook,  # noqa: F401
        duplicate_space,  # noqa: F401
        edit_discussion_comment,  # noqa: F401
        enable_webhook,  # noqa: F401
        file_exists,  # noqa: F401
        get_collection,  # noqa: F401
        get_dataset_tags,  # noqa: F401
        get_discussion_details,  # noqa: F401
        get_full_repo_name,  # noqa: F401
        get_inference_endpoint,  # noqa: F401
        get_model_tags,  # noqa: F401
        get_paths_info,  # noqa: F401
        get_repo_discussions,  # noqa: F401
        get_safetensors_metadata,  # noqa: F401
        get_space_runtime,  # noqa: F401
        get_space_variables,  # noqa: F401
        get_token_permission,  # noqa: F401
        get_user_overview,  # noqa: F401
        get_webhook,  # noqa: F401
        grant_access,  # noqa: F401
        list_accepted_access_requests,  # noqa: F401
        list_collections,  # noqa: F401
        list_datasets,  # noqa: F401
        list_inference_endpoints,  # noqa: F401
        list_liked_repos,  # noqa: F401
        list_models,  # noqa: F401
        list_organization_members,  # noqa: F401
        list_papers,  # noqa: F401
        list_pending_access_requests,  # noqa: F401
        list_rejected_access_requests,  # noqa: F401
        list_repo_commits,  # noqa: F401
        list_repo_files,  # noqa: F401
        list_repo_likers,  # noqa: F401
        list_repo_refs,  # noqa: F401
        list_repo_tree,  # noqa: F401
        list_spaces,  # noqa: F401
        list_user_followers,  # noqa: F401
        list_user_following,  # noqa: F401
        list_webhooks,  # noqa: F401
        merge_pull_request,  # noqa: F401
        model_info,  # noqa: F401
        move_repo,  # noqa: F401
        paper_info,  # noqa: F401
        parse_safetensors_file_metadata,  # noqa: F401
        pause_inference_endpoint,  # noqa: F401
        pause_space,  # noqa: F401
        preupload_lfs_files,  # noqa: F401
        reject_access_request,  # noqa: F401
        rename_discussion,  # noqa: F401
        repo_exists,  # noqa: F401
        repo_info,  # noqa: F401
        repo_type_and_id_from_hf_id,  # noqa: F401
        request_space_hardware,  # noqa: F401
        request_space_storage,  # noqa: F401
        restart_space,  # noqa: F401
        resume_inference_endpoint,  # noqa: F401
        revision_exists,  # noqa: F401
        run_as_future,  # noqa: F401
        scale_to_zero_inference_endpoint,  # noqa: F401
        set_space_sleep_time,  # noqa: F401
        space_info,  # noqa: F401
        super_squash_history,  # noqa: F401
        unlike,  # noqa: F401
        update_collection_item,  # noqa: F401
        update_collection_metadata,  # noqa: F401
        update_inference_endpoint,  # noqa: F401
        update_repo_settings,  # noqa: F401
        update_repo_visibility,  # noqa: F401
        update_webhook,  # noqa: F401
        upload_file,  # noqa: F401
        upload_folder,  # noqa: F401
        upload_large_folder,  # noqa: F401
        whoami,  # noqa: F401
    )
    from .hf_file_system import (
        HfFileSystem,  # noqa: F401
        HfFileSystemFile,  # noqa: F401
        HfFileSystemResolvedPath,  # noqa: F401
        HfFileSystemStreamFile,  # noqa: F401
    )
    from .hub_mixin import (
        ModelHubMixin,  # noqa: F401
        PyTorchModelHubMixin,  # noqa: F401
    )
    from .inference._client import (
        InferenceClient,  # noqa: F401
        InferenceTimeoutError,  # noqa: F401
    )
    from .inference._generated._async_client import AsyncInferenceClient  # noqa: F401
    from .inference._generated.types import (
        AudioClassificationInput,  # noqa: F401
        AudioClassificationOutputElement,  # noqa: F401
        AudioClassificationOutputTransform,  # noqa: F401
        AudioClassificationParameters,  # noqa: F401
        AudioToAudioInput,  # noqa: F401
        AudioToAudioOutputElement,  # noqa: F401
        AutomaticSpeechRecognitionEarlyStoppingEnum,  # noqa: F401
        AutomaticSpeechRecognitionGenerationParameters,  # noqa: F401
        AutomaticSpeechRecognitionInput,  # noqa: F401
        AutomaticSpeechRecognitionOutput,  # noqa: F401
        AutomaticSpeechRecognitionOutputChunk,  # noqa: F401
        AutomaticSpeechRecognitionParameters,  # noqa: F401
        ChatCompletionInput,  # noqa: F401
        ChatCompletionInputFunctionDefinition,  # noqa: F401
        ChatCompletionInputFunctionName,  # noqa: F401
        ChatCompletionInputGrammarType,  # noqa: F401
        ChatCompletionInputGrammarTypeType,  # noqa: F401
        ChatCompletionInputMessage,  # noqa: F401
        ChatCompletionInputMessageChunk,  # noqa: F401
        ChatCompletionInputMessageChunkType,  # noqa: F401
        ChatCompletionInputStreamOptions,  # noqa: F401
        ChatCompletionInputTool,  # noqa: F401
        ChatCompletionInputToolChoiceClass,  # noqa: F401
        ChatCompletionInputToolChoiceEnum,  # noqa: F401
        ChatCompletionInputURL,  # noqa: F401
        ChatCompletionOutput,  # noqa: F401
        ChatCompletionOutputComplete,  # noqa: F401
        ChatCompletionOutputFunctionDefinition,  # noqa: F401
        ChatCompletionOutputLogprob,  # noqa: F401
        ChatCompletionOutputLogprobs,  # noqa: F401
        ChatCompletionOutputMessage,  # noqa: F401
        ChatCompletionOutputToolCall,  # noqa: F401
        ChatCompletionOutputTopLogprob,  # noqa: F401
        ChatCompletionOutputUsage,  # noqa: F401
        ChatCompletionStreamOutput,  # noqa: F401
        ChatCompletionStreamOutputChoice,  # noqa: F401
        ChatCompletionStreamOutputDelta,  # noqa: F401
        ChatCompletionStreamOutputDeltaToolCall,  # noqa: F401
        ChatCompletionStreamOutputFunction,  # noqa: F401
        ChatCompletionStreamOutputLogprob,  # noqa: F401
        ChatCompletionStreamOutputLogprobs,  # noqa: F401
        ChatCompletionStreamOutputTopLogprob,  # noqa: F401
        ChatCompletionStreamOutputUsage,  # noqa: F401
        DepthEstimationInput,  # noqa: F401
        DepthEstimationOutput,  # noqa: F401
        DocumentQuestionAnsweringInput,  # noqa: F401
        DocumentQuestionAnsweringInputData,  # noqa: F401
        DocumentQuestionAnsweringOutputElement,  # noqa: F401
        DocumentQuestionAnsweringParameters,  # noqa: F401
        FeatureExtractionInput,  # noqa: F401
        FeatureExtractionInputTruncationDirection,  # noqa: F401
        FillMaskInput,  # noqa: F401
        FillMaskOutputElement,  # noqa: F401
        FillMaskParameters,  # noqa: F401
        ImageClassificationInput,  # noqa: F401
        ImageClassificationOutputElement,  # noqa: F401
        ImageClassificationOutputTransform,  # noqa: F401
        ImageClassificationParameters,  # noqa: F401
        ImageSegmentationInput,  # noqa: F401
        ImageSegmentationOutputElement,  # noqa: F401
        ImageSegmentationParameters,  # noqa: F401
        ImageSegmentationSubtask,  # noqa: F401
        ImageToImageInput,  # noqa: F401
        ImageToImageOutput,  # noqa: F401
        ImageToImageParameters,  # noqa: F401
        ImageToImageTargetSize,  # noqa: F401
        ImageToTextEarlyStoppingEnum,  # noqa: F401
        ImageToTextGenerationParameters,  # noqa: F401
        ImageToTextInput,  # noqa: F401
        ImageToTextOutput,  # noqa: F401
        ImageToTextParameters,  # noqa: F401
        ObjectDetectionBoundingBox,  # noqa: F401
        ObjectDetectionInput,  # noqa: F401
        ObjectDetectionOutputElement,  # noqa: F401
        ObjectDetectionParameters,  # noqa: F401
        Padding,  # noqa: F401
        QuestionAnsweringInput,  # noqa: F401
        QuestionAnsweringInputData,  # noqa: F401
        QuestionAnsweringOutputElement,  # noqa: F401
        QuestionAnsweringParameters,  # noqa: F401
        SentenceSimilarityInput,  # noqa: F401
        SentenceSimilarityInputData,  # noqa: F401
        SummarizationInput,  # noqa: F401
        SummarizationOutput,  # noqa: F401
        SummarizationParameters,  # noqa: F401
        SummarizationTruncationStrategy,  # noqa: F401
        TableQuestionAnsweringInput,  # noqa: F401
        TableQuestionAnsweringInputData,  # noqa: F401
        TableQuestionAnsweringOutputElement,  # noqa: F401
        TableQuestionAnsweringParameters,  # noqa: F401
        Text2TextGenerationInput,  # noqa: F401
        Text2TextGenerationOutput,  # noqa: F401
        Text2TextGenerationParameters,  # noqa: F401
        Text2TextGenerationTruncationStrategy,  # noqa: F401
        TextClassificationInput,  # noqa: F401
        TextClassificationOutputElement,  # noqa: F401
        TextClassificationOutputTransform,  # noqa: F401
        TextClassificationParameters,  # noqa: F401
        TextGenerationInput,  # noqa: F401
        TextGenerationInputGenerateParameters,  # noqa: F401
        TextGenerationInputGrammarType,  # noqa: F401
        TextGenerationOutput,  # noqa: F401
        TextGenerationOutputBestOfSequence,  # noqa: F401
        TextGenerationOutputDetails,  # noqa: F401
        TextGenerationOutputFinishReason,  # noqa: F401
        TextGenerationOutputPrefillToken,  # noqa: F401
        TextGenerationOutputToken,  # noqa: F401
        TextGenerationStreamOutput,  # noqa: F401
        TextGenerationStreamOutputStreamDetails,  # noqa: F401
        TextGenerationStreamOutputToken,  # noqa: F401
        TextToAudioEarlyStoppingEnum,  # noqa: F401
        TextToAudioGenerationParameters,  # noqa: F401
        TextToAudioInput,  # noqa: F401
        TextToAudioOutput,  # noqa: F401
        TextToAudioParameters,  # noqa: F401
        TextToImageInput,  # noqa: F401
        TextToImageOutput,  # noqa: F401
        TextToImageParameters,  # noqa: F401
        TextToSpeechEarlyStoppingEnum,  # noqa: F401
        TextToSpeechGenerationParameters,  # noqa: F401
        TextToSpeechInput,  # noqa: F401
        TextToSpeechOutput,  # noqa: F401
        TextToSpeechParameters,  # noqa: F401
        TextToVideoInput,  # noqa: F401
        TextToVideoOutput,  # noqa: F401
        TextToVideoParameters,  # noqa: F401
        TokenClassificationAggregationStrategy,  # noqa: F401
        TokenClassificationInput,  # noqa: F401
        TokenClassificationOutputElement,  # noqa: F401
        TokenClassificationParameters,  # noqa: F401
        TranslationInput,  # noqa: F401
        TranslationOutput,  # noqa: F401
        TranslationParameters,  # noqa: F401
        TranslationTruncationStrategy,  # noqa: F401
        TypeEnum,  # noqa: F401
        VideoClassificationInput,  # noqa: F401
        VideoClassificationOutputElement,  # noqa: F401
        VideoClassificationOutputTransform,  # noqa: F401
        VideoClassificationParameters,  # noqa: F401
        VisualQuestionAnsweringInput,  # noqa: F401
        VisualQuestionAnsweringInputData,  # noqa: F401
        VisualQuestionAnsweringOutputElement,  # noqa: F401
        VisualQuestionAnsweringParameters,  # noqa: F401
        ZeroShotClassificationInput,  # noqa: F401
        ZeroShotClassificationOutputElement,  # noqa: F401
        ZeroShotClassificationParameters,  # noqa: F401
        ZeroShotImageClassificationInput,  # noqa: F401
        ZeroShotImageClassificationOutputElement,  # noqa: F401
        ZeroShotImageClassificationParameters,  # noqa: F401
        ZeroShotObjectDetectionBoundingBox,  # noqa: F401
        ZeroShotObjectDetectionInput,  # noqa: F401
        ZeroShotObjectDetectionOutputElement,  # noqa: F401
        ZeroShotObjectDetectionParameters,  # noqa: F401
    )
    from .inference_api import InferenceApi  # noqa: F401
    from .keras_mixin import (
        KerasModelHubMixin,  # noqa: F401
        from_pretrained_keras,  # noqa: F401
        push_to_hub_keras,  # noqa: F401
        save_pretrained_keras,  # noqa: F401
    )
    from .repocard import (
        DatasetCard,  # noqa: F401
        ModelCard,  # noqa: F401
        RepoCard,  # noqa: F401
        SpaceCard,  # noqa: F401
        metadata_eval_result,  # noqa: F401
        metadata_load,  # noqa: F401
        metadata_save,  # noqa: F401
        metadata_update,  # noqa: F401
    )
    from .repocard_data import (
        CardData,  # noqa: F401
        DatasetCardData,  # noqa: F401
        EvalResult,  # noqa: F401
        ModelCardData,  # noqa: F401
        SpaceCardData,  # noqa: F401
    )
    from .repository import Repository  # noqa: F401
    from .serialization import (
        StateDictSplit,  # noqa: F401
        get_tf_storage_size,  # noqa: F401
        get_torch_storage_id,  # noqa: F401
        get_torch_storage_size,  # noqa: F401
        load_state_dict_from_file,  # noqa: F401
        load_torch_model,  # noqa: F401
        save_torch_model,  # noqa: F401
        save_torch_state_dict,  # noqa: F401
        split_state_dict_into_shards_factory,  # noqa: F401
        split_tf_state_dict_into_shards,  # noqa: F401
        split_torch_state_dict_into_shards,  # noqa: F401
    )
    from .serialization._dduf import (
        DDUFEntry,  # noqa: F401
        export_entries_as_dduf,  # noqa: F401
        export_folder_as_dduf,  # noqa: F401
        read_dduf_file,  # noqa: F401
    )
    from .utils import (
        CachedFileInfo,  # noqa: F401
        CachedRepoInfo,  # noqa: F401
        CachedRevisionInfo,  # noqa: F401
        CacheNotFound,  # noqa: F401
        CorruptedCacheException,  # noqa: F401
        DeleteCacheStrategy,  # noqa: F401
        HFCacheInfo,  # noqa: F401
        HfFolder,  # noqa: F401
        cached_assets_path,  # noqa: F401
        configure_http_backend,  # noqa: F401
        dump_environment_info,  # noqa: F401
        get_session,  # noqa: F401
        get_token,  # noqa: F401
        logging,  # noqa: F401
        scan_cache_dir,  # noqa: F401
    )
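# Editorial note (sketch, not part of the generated file): the block above runs only
# for type checkers (`TYPE_CHECKING` is False at runtime), so IDEs and static analyzers
# see concrete symbols while runtime access still goes through the lazy `__getattr__`:
#
#     from huggingface_hub import snapshot_download  # resolved lazily via `_attach`
#     snapshot_download(repo_id="gpt2")  # first call triggers the real import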
parrot/lib/python3.10/site-packages/huggingface_hub/_commit_api.py
ADDED
@@ -0,0 +1,758 @@
"""
Type definitions and utilities for the `create_commit` API
"""

import base64
import io
import os
import warnings
from collections import defaultdict
from contextlib import contextmanager
from dataclasses import dataclass, field
from itertools import groupby
from pathlib import Path, PurePosixPath
from typing import TYPE_CHECKING, Any, BinaryIO, Dict, Iterable, Iterator, List, Literal, Optional, Tuple, Union

from tqdm.contrib.concurrent import thread_map

from . import constants
from .errors import EntryNotFoundError
from .file_download import hf_hub_url
from .lfs import UploadInfo, lfs_upload, post_lfs_batch_info
from .utils import (
    FORBIDDEN_FOLDERS,
    chunk_iterable,
    get_session,
    hf_raise_for_status,
    logging,
    sha,
    tqdm_stream_file,
    validate_hf_hub_args,
)
from .utils import tqdm as hf_tqdm


if TYPE_CHECKING:
    from .hf_api import RepoFile


logger = logging.get_logger(__name__)


UploadMode = Literal["lfs", "regular"]

# Max is 1,000 per request on the Hub for HfApi.get_paths_info
# Otherwise we get:
# HfHubHTTPError: 413 Client Error: Payload Too Large for url: https://huggingface.co/api/datasets/xxx (Request ID: xxx)\n\ntoo many parameters
# See https://github.com/huggingface/huggingface_hub/issues/1503
FETCH_LFS_BATCH_SIZE = 500


@dataclass
class CommitOperationDelete:
    """
    Data structure holding necessary info to delete a file or a folder from a repository
    on the Hub.

    Args:
        path_in_repo (`str`):
            Relative filepath in the repo, for example: `"checkpoints/1fec34a/weights.bin"`
            for a file or `"checkpoints/1fec34a/"` for a folder.
        is_folder (`bool` or `Literal["auto"]`, *optional*):
            Whether the Delete Operation applies to a folder or not. If "auto", the path
            type (file or folder) is guessed automatically by checking whether the path
            ends with a "/" (folder) or not (file). To explicitly set the path type, you
            can set `is_folder=True` or `is_folder=False`.
    """

    path_in_repo: str
    is_folder: Union[bool, Literal["auto"]] = "auto"

    def __post_init__(self):
        self.path_in_repo = _validate_path_in_repo(self.path_in_repo)

        if self.is_folder == "auto":
            self.is_folder = self.path_in_repo.endswith("/")
        if not isinstance(self.is_folder, bool):
            raise ValueError(
                f"Wrong value for `is_folder`. Must be one of [`True`, `False`, `'auto'`]. Got '{self.is_folder}'."
            )

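# Usage sketch (editorial addition, not part of the upstream file): with the default
# `is_folder="auto"`, a trailing slash marks a folder deletion, so these are equivalent
# to passing `is_folder=False` and `is_folder=True` respectively:
#
#     delete_file = CommitOperationDelete(path_in_repo="checkpoints/1fec34a/weights.bin")
#     delete_folder = CommitOperationDelete(path_in_repo="checkpoints/1fec34a/")
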
@dataclass
class CommitOperationCopy:
    """
    Data structure holding necessary info to copy a file in a repository on the Hub.

    Limitations:
        - Only LFS files can be copied. To copy a regular file, you need to download it locally and re-upload it
        - Cross-repository copies are not supported.

    Note: you can combine a [`CommitOperationCopy`] and a [`CommitOperationDelete`] to rename an LFS file on the Hub.

    Args:
        src_path_in_repo (`str`):
            Relative filepath in the repo of the file to be copied, e.g. `"checkpoints/1fec34a/weights.bin"`.
        path_in_repo (`str`):
            Relative filepath in the repo where to copy the file, e.g. `"checkpoints/1fec34a/weights_copy.bin"`.
        src_revision (`str`, *optional*):
            The git revision of the file to be copied. Can be any valid git revision.
            Defaults to the target commit revision.
    """

    src_path_in_repo: str
    path_in_repo: str
    src_revision: Optional[str] = None
    # set to the OID of the file to be copied if it has already been uploaded
    # useful to determine if a commit will be empty or not.
    _src_oid: Optional[str] = None
    # set to the OID of the file to copy to if it has already been uploaded
    # useful to determine if a commit will be empty or not.
    _dest_oid: Optional[str] = None

    def __post_init__(self):
        self.src_path_in_repo = _validate_path_in_repo(self.src_path_in_repo)
        self.path_in_repo = _validate_path_in_repo(self.path_in_repo)

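# Usage sketch (editorial addition, not part of the upstream file): as the docstring
# notes, renaming an LFS file is expressed as a copy plus a delete in the same commit:
#
#     rename_ops = [
#         CommitOperationCopy(
#             src_path_in_repo="checkpoints/1fec34a/weights.bin",
#             path_in_repo="checkpoints/1fec34a/weights_final.bin",
#         ),
#         CommitOperationDelete(path_in_repo="checkpoints/1fec34a/weights.bin"),
#     ]
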
+
@dataclass
|
| 119 |
+
class CommitOperationAdd:
|
| 120 |
+
"""
|
| 121 |
+
Data structure holding necessary info to upload a file to a repository on the Hub.
|
| 122 |
+
|
| 123 |
+
Args:
|
| 124 |
+
path_in_repo (`str`):
|
| 125 |
+
Relative filepath in the repo, for example: `"checkpoints/1fec34a/weights.bin"`
|
| 126 |
+
path_or_fileobj (`str`, `Path`, `bytes`, or `BinaryIO`):
|
| 127 |
+
Either:
|
| 128 |
+
- a path to a local file (as `str` or `pathlib.Path`) to upload
|
| 129 |
+
- a buffer of bytes (`bytes`) holding the content of the file to upload
|
| 130 |
+
- a "file object" (subclass of `io.BufferedIOBase`), typically obtained
|
| 131 |
+
with `open(path, "rb")`. It must support `seek()` and `tell()` methods.
|
| 132 |
+
|
| 133 |
+
Raises:
|
| 134 |
+
[`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
|
| 135 |
+
If `path_or_fileobj` is not one of `str`, `Path`, `bytes` or `io.BufferedIOBase`.
|
| 136 |
+
[`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
|
| 137 |
+
If `path_or_fileobj` is a `str` or `Path` but not a path to an existing file.
|
| 138 |
+
[`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
|
| 139 |
+
If `path_or_fileobj` is a `io.BufferedIOBase` but it doesn't support both
|
| 140 |
+
`seek()` and `tell()`.
|
| 141 |
+
"""
|
| 142 |
+
|
| 143 |
+
path_in_repo: str
|
| 144 |
+
path_or_fileobj: Union[str, Path, bytes, BinaryIO]
|
| 145 |
+
upload_info: UploadInfo = field(init=False, repr=False)
|
| 146 |
+
|
| 147 |
+
# Internal attributes
|
| 148 |
+
|
| 149 |
+
# set to "lfs" or "regular" once known
|
| 150 |
+
_upload_mode: Optional[UploadMode] = field(init=False, repr=False, default=None)
|
| 151 |
+
|
| 152 |
+
# set to True if .gitignore rules prevent the file from being uploaded as LFS
|
| 153 |
+
# (server-side check)
|
| 154 |
+
_should_ignore: Optional[bool] = field(init=False, repr=False, default=None)
|
| 155 |
+
|
| 156 |
+
# set to the remote OID of the file if it has already been uploaded
|
| 157 |
+
# useful to determine if a commit will be empty or not
|
| 158 |
+
_remote_oid: Optional[str] = field(init=False, repr=False, default=None)
|
| 159 |
+
|
| 160 |
+
# set to True once the file has been uploaded as LFS
|
| 161 |
+
_is_uploaded: bool = field(init=False, repr=False, default=False)
|
| 162 |
+
|
| 163 |
+
# set to True once the file has been committed
|
| 164 |
+
_is_committed: bool = field(init=False, repr=False, default=False)
|
| 165 |
+
|
| 166 |
+
def __post_init__(self) -> None:
|
| 167 |
+
"""Validates `path_or_fileobj` and compute `upload_info`."""
|
| 168 |
+
self.path_in_repo = _validate_path_in_repo(self.path_in_repo)
|
| 169 |
+
|
| 170 |
+
# Validate `path_or_fileobj` value
|
| 171 |
+
if isinstance(self.path_or_fileobj, Path):
|
| 172 |
+
self.path_or_fileobj = str(self.path_or_fileobj)
|
| 173 |
+
if isinstance(self.path_or_fileobj, str):
|
| 174 |
+
path_or_fileobj = os.path.normpath(os.path.expanduser(self.path_or_fileobj))
|
| 175 |
+
if not os.path.isfile(path_or_fileobj):
|
| 176 |
+
raise ValueError(f"Provided path: '{path_or_fileobj}' is not a file on the local file system")
|
| 177 |
+
elif not isinstance(self.path_or_fileobj, (io.BufferedIOBase, bytes)):
|
| 178 |
+
# ^^ Inspired from: https://stackoverflow.com/questions/44584829/how-to-determine-if-file-is-opened-in-binary-or-text-mode
|
| 179 |
+
raise ValueError(
|
| 180 |
+
"path_or_fileobj must be either an instance of str, bytes or"
|
| 181 |
+
" io.BufferedIOBase. If you passed a file-like object, make sure it is"
|
| 182 |
+
" in binary mode."
|
| 183 |
+
)
|
| 184 |
+
if isinstance(self.path_or_fileobj, io.BufferedIOBase):
|
| 185 |
+
try:
|
| 186 |
+
self.path_or_fileobj.tell()
|
| 187 |
+
self.path_or_fileobj.seek(0, os.SEEK_CUR)
|
| 188 |
+
except (OSError, AttributeError) as exc:
|
| 189 |
+
raise ValueError(
|
| 190 |
+
"path_or_fileobj is a file-like object but does not implement seek() and tell()"
|
| 191 |
+
) from exc
|
| 192 |
+
|
| 193 |
+
# Compute "upload_info" attribute
|
| 194 |
+
if isinstance(self.path_or_fileobj, str):
|
| 195 |
+
self.upload_info = UploadInfo.from_path(self.path_or_fileobj)
|
| 196 |
+
elif isinstance(self.path_or_fileobj, bytes):
|
| 197 |
+
self.upload_info = UploadInfo.from_bytes(self.path_or_fileobj)
|
| 198 |
+
else:
|
| 199 |
+
self.upload_info = UploadInfo.from_fileobj(self.path_or_fileobj)
|
| 200 |
+
|
| 201 |
+
@contextmanager
|
| 202 |
+
def as_file(self, with_tqdm: bool = False) -> Iterator[BinaryIO]:
|
| 203 |
+
"""
|
| 204 |
+
A context manager that yields a file-like object allowing to read the underlying
|
| 205 |
+
data behind `path_or_fileobj`.
|
| 206 |
+
|
| 207 |
+
Args:
|
| 208 |
+
with_tqdm (`bool`, *optional*, defaults to `False`):
|
| 209 |
+
If True, iterating over the file object will display a progress bar. Only
|
| 210 |
+
works if the file-like object is a path to a file. Pure bytes and buffers
|
| 211 |
+
are not supported.
|
| 212 |
+
|
| 213 |
+
Example:
|
| 214 |
+
|
| 215 |
+
```python
|
| 216 |
+
>>> operation = CommitOperationAdd(
|
| 217 |
+
... path_in_repo="remote/dir/weights.h5",
|
| 218 |
+
... path_or_fileobj="./local/weights.h5",
|
| 219 |
+
... )
|
| 220 |
+
CommitOperationAdd(path_in_repo='remote/dir/weights.h5', path_or_fileobj='./local/weights.h5')
|
| 221 |
+
|
| 222 |
+
>>> with operation.as_file() as file:
|
| 223 |
+
... content = file.read()
|
| 224 |
+
|
| 225 |
+
>>> with operation.as_file(with_tqdm=True) as file:
|
| 226 |
+
... while True:
|
| 227 |
+
... data = file.read(1024)
|
| 228 |
+
... if not data:
|
| 229 |
+
... break
|
| 230 |
+
config.json: 100%|█████████████████████████| 8.19k/8.19k [00:02<00:00, 3.72kB/s]
|
| 231 |
+
|
| 232 |
+
>>> with operation.as_file(with_tqdm=True) as file:
|
| 233 |
+
... requests.put(..., data=file)
|
| 234 |
+
config.json: 100%|█████████████████████████| 8.19k/8.19k [00:02<00:00, 3.72kB/s]
|
| 235 |
+
```
|
| 236 |
+
"""
|
| 237 |
+
if isinstance(self.path_or_fileobj, str) or isinstance(self.path_or_fileobj, Path):
|
| 238 |
+
if with_tqdm:
|
| 239 |
+
with tqdm_stream_file(self.path_or_fileobj) as file:
|
| 240 |
+
yield file
|
| 241 |
+
else:
|
| 242 |
+
with open(self.path_or_fileobj, "rb") as file:
|
| 243 |
+
yield file
|
| 244 |
+
elif isinstance(self.path_or_fileobj, bytes):
|
| 245 |
+
yield io.BytesIO(self.path_or_fileobj)
|
| 246 |
+
elif isinstance(self.path_or_fileobj, io.BufferedIOBase):
|
| 247 |
+
prev_pos = self.path_or_fileobj.tell()
|
| 248 |
+
yield self.path_or_fileobj
|
| 249 |
+
self.path_or_fileobj.seek(prev_pos, io.SEEK_SET)
|
| 250 |
+
|
| 251 |
+
def b64content(self) -> bytes:
|
| 252 |
+
"""
|
| 253 |
+
The base64-encoded content of `path_or_fileobj`
|
| 254 |
+
|
| 255 |
+
Returns: `bytes`
|
| 256 |
+
"""
|
| 257 |
+
with self.as_file() as file:
|
| 258 |
+
return base64.b64encode(file.read())
|
| 259 |
+
|
| 260 |
+
@property
|
| 261 |
+
def _local_oid(self) -> Optional[str]:
|
| 262 |
+
"""Return the OID of the local file.
|
| 263 |
+
|
| 264 |
+
This OID is then compared to `self._remote_oid` to check if the file has changed compared to the remote one.
|
| 265 |
+
If the file did not change, we won't upload it again to prevent empty commits.
|
| 266 |
+
|
| 267 |
+
For LFS files, the OID corresponds to the SHA256 of the file content (used a LFS ref).
|
| 268 |
+
For regular files, the OID corresponds to the SHA1 of the file content.
|
| 269 |
+
Note: this is slightly different to git OID computation since the oid of an LFS file is usually the git-SHA1 of the
|
| 270 |
+
pointer file content (not the actual file content). However, using the SHA256 is enough to detect changes
|
| 271 |
+
and more convenient client-side.
|
| 272 |
+
"""
|
| 273 |
+
if self._upload_mode is None:
|
| 274 |
+
return None
|
| 275 |
+
elif self._upload_mode == "lfs":
|
| 276 |
+
return self.upload_info.sha256.hex()
|
| 277 |
+
else:
|
| 278 |
+
# Regular file => compute sha1
|
| 279 |
+
# => no need to read by chunk since the file is guaranteed to be <=5MB.
|
| 280 |
+
with self.as_file() as file:
|
| 281 |
+
return sha.git_hash(file.read())
|
| 282 |
+
|
| 283 |
+
|
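# Usage sketch (editorial addition, not part of the upstream file): the three accepted
# forms of `path_or_fileobj`. In every case `__post_init__` eagerly computes
# `upload_info` (size, sha256, sample bytes) for the preupload/LFS endpoints:
#
#     add_from_path = CommitOperationAdd(path_in_repo="weights.bin", path_or_fileobj="./weights.bin")
#     add_from_bytes = CommitOperationAdd(path_in_repo="config.json", path_or_fileobj=b"{}")
#     with open("./weights.bin", "rb") as f:
#         add_from_fileobj = CommitOperationAdd(path_in_repo="weights.bin", path_or_fileobj=f)
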
def _validate_path_in_repo(path_in_repo: str) -> str:
    # Validate `path_in_repo` value to prevent a server-side issue
    if path_in_repo.startswith("/"):
        path_in_repo = path_in_repo[1:]
    if path_in_repo == "." or path_in_repo == ".." or path_in_repo.startswith("../"):
        raise ValueError(f"Invalid `path_in_repo` in CommitOperation: '{path_in_repo}'")
    if path_in_repo.startswith("./"):
        path_in_repo = path_in_repo[2:]
    for forbidden in FORBIDDEN_FOLDERS:
        if any(part == forbidden for part in path_in_repo.split("/")):
            raise ValueError(
                f"Invalid `path_in_repo` in CommitOperation: cannot update files under a '{forbidden}/' folder (path:"
                f" '{path_in_repo}')."
            )
    return path_in_repo

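# Behavior sketch (editorial addition, not part of the upstream file): leading "/" and
# "./" prefixes are normalized away, while traversal-style paths and paths touching a
# folder listed in FORBIDDEN_FOLDERS raise:
#
#     _validate_path_in_repo("/a/b.txt")   # -> "a/b.txt"
#     _validate_path_in_repo("./a/b.txt")  # -> "a/b.txt"
#     _validate_path_in_repo("../a.txt")   # raises ValueError
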
CommitOperation = Union[CommitOperationAdd, CommitOperationCopy, CommitOperationDelete]


def _warn_on_overwriting_operations(operations: List[CommitOperation]) -> None:
    """
    Warn user when a list of operations is expected to overwrite itself in a single
    commit.

    Rules:
    - If a filepath is updated by multiple `CommitOperationAdd` operations, a warning
      message is triggered.
    - If a filepath is updated at least once by a `CommitOperationAdd` and then deleted
      by a `CommitOperationDelete`, a warning is triggered.
    - If a `CommitOperationDelete` deletes a filepath that is then updated by a
      `CommitOperationAdd`, no warning is triggered. This is usually useless (no need to
      delete before upload) but can happen if a user deletes an entire folder and then
      adds new files to it.
    """
    nb_additions_per_path: Dict[str, int] = defaultdict(int)
    for operation in operations:
        path_in_repo = operation.path_in_repo
        if isinstance(operation, CommitOperationAdd):
            if nb_additions_per_path[path_in_repo] > 0:
                warnings.warn(
                    "About to update multiple times the same file in the same commit:"
                    f" '{path_in_repo}'. This can cause undesired inconsistencies in"
                    " your repo."
                )
            nb_additions_per_path[path_in_repo] += 1
            for parent in PurePosixPath(path_in_repo).parents:
                # Also keep track of number of updated files per folder
                # => warns if deleting a folder overwrites some contained files
                nb_additions_per_path[str(parent)] += 1
        if isinstance(operation, CommitOperationDelete):
            if nb_additions_per_path[str(PurePosixPath(path_in_repo))] > 0:
                if operation.is_folder:
                    warnings.warn(
                        "About to delete a folder containing files that have just been"
                        f" updated within the same commit: '{path_in_repo}'. This can"
                        " cause undesired inconsistencies in your repo."
                    )
                else:
                    warnings.warn(
                        "About to delete a file that has just been updated within the"
                        f" same commit: '{path_in_repo}'. This can cause undesired"
                        " inconsistencies in your repo."
                    )

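# Behavior sketch (editorial addition, not part of the upstream file): adding the same
# path twice warns on the second addition, and deleting a path that was just added
# warns as well; a delete followed by an add stays silent:
#
#     _warn_on_overwriting_operations([
#         CommitOperationAdd(path_in_repo="a.txt", path_or_fileobj=b"v1"),
#         CommitOperationAdd(path_in_repo="a.txt", path_or_fileobj=b"v2"),  # warns
#         CommitOperationDelete(path_in_repo="a.txt"),  # warns
#     ])
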
@validate_hf_hub_args
def _upload_lfs_files(
    *,
    additions: List[CommitOperationAdd],
    repo_type: str,
    repo_id: str,
    headers: Dict[str, str],
    endpoint: Optional[str] = None,
    num_threads: int = 5,
    revision: Optional[str] = None,
):
    """
    Uploads the content of `additions` to the Hub using the large file storage protocol.

    Relevant external documentation:
        - LFS Batch API: https://github.com/git-lfs/git-lfs/blob/main/docs/api/batch.md

    Args:
        additions (`List` of `CommitOperationAdd`):
            The files to be uploaded
        repo_type (`str`):
            Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`.
        repo_id (`str`):
            A namespace (user or an organization) and a repo name separated
            by a `/`.
        headers (`Dict[str, str]`):
            Headers to use for the request, including authorization headers and user agent.
        num_threads (`int`, *optional*):
            The number of concurrent threads to use when uploading. Defaults to 5.
        revision (`str`, *optional*):
            The git revision to upload to.

    Raises:
        [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
            If an upload failed for any reason
        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
            If the server returns malformed responses
        [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
            If the LFS batch endpoint returned an HTTP error.
    """
    # Step 1: retrieve upload instructions from the LFS batch endpoint.
    # Upload instructions are retrieved in chunks of 256 files to avoid reaching
    # the payload limit.
    batch_actions: List[Dict] = []
    for chunk in chunk_iterable(additions, chunk_size=256):
        batch_actions_chunk, batch_errors_chunk = post_lfs_batch_info(
            upload_infos=[op.upload_info for op in chunk],
            repo_id=repo_id,
            repo_type=repo_type,
            revision=revision,
            endpoint=endpoint,
            headers=headers,
            token=None,  # already passed in 'headers'
        )

        # If at least 1 error, we do not retrieve information for other chunks
        if batch_errors_chunk:
            message = "\n".join(
                [
                    f"Encountered error for file with OID {err.get('oid')}: `{err.get('error', {}).get('message')}`"
                    for err in batch_errors_chunk
                ]
            )
            raise ValueError(f"LFS batch endpoint returned errors:\n{message}")

        batch_actions += batch_actions_chunk
    oid2addop = {add_op.upload_info.sha256.hex(): add_op for add_op in additions}

    # Step 2: ignore files that have already been uploaded
    filtered_actions = []
    for action in batch_actions:
        if action.get("actions") is None:
            logger.debug(
                f"Content of file {oid2addop[action['oid']].path_in_repo} is already"
                " present upstream - skipping upload."
            )
        else:
            filtered_actions.append(action)

    if len(filtered_actions) == 0:
        logger.debug("No LFS files to upload.")
        return

    # Step 3: upload files concurrently according to these instructions
    def _wrapped_lfs_upload(batch_action) -> None:
        try:
            operation = oid2addop[batch_action["oid"]]
            lfs_upload(operation=operation, lfs_batch_action=batch_action, headers=headers, endpoint=endpoint)
        except Exception as exc:
            raise RuntimeError(f"Error while uploading '{operation.path_in_repo}' to the Hub.") from exc

    if constants.HF_HUB_ENABLE_HF_TRANSFER:
        logger.debug(f"Uploading {len(filtered_actions)} LFS files to the Hub using `hf_transfer`.")
        for action in hf_tqdm(filtered_actions, name="huggingface_hub.lfs_upload"):
            _wrapped_lfs_upload(action)
    elif len(filtered_actions) == 1:
        logger.debug("Uploading 1 LFS file to the Hub")
        _wrapped_lfs_upload(filtered_actions[0])
    else:
        logger.debug(
            f"Uploading {len(filtered_actions)} LFS files to the Hub using up to {num_threads} threads concurrently"
        )
        thread_map(
            _wrapped_lfs_upload,
            filtered_actions,
            desc=f"Upload {len(filtered_actions)} LFS files",
            max_workers=num_threads,
            tqdm_class=hf_tqdm,
        )

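# Flow sketch (editorial addition, not part of the upstream file): additions are sent to
# the LFS batch endpoint in chunks of 256, files already present upstream (no "actions"
# in the response) are skipped, and the rest are uploaded sequentially or via
# `thread_map`. A typical internal call, assuming `headers` was built by the caller:
#
#     _upload_lfs_files(
#         additions=[op for op in operations if isinstance(op, CommitOperationAdd)],
#         repo_type="model",
#         repo_id="username/my-model",
#         headers=headers,
#         num_threads=5,
#     )
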
| 461 |
+
def _validate_preupload_info(preupload_info: dict):
    files = preupload_info.get("files")
    if not isinstance(files, list):
        raise ValueError("preupload_info is improperly formatted")
    for file_info in files:
        if not (
            isinstance(file_info, dict)
            and isinstance(file_info.get("path"), str)
            and isinstance(file_info.get("uploadMode"), str)
            and (file_info["uploadMode"] in ("lfs", "regular"))
        ):
            raise ValueError("preupload_info is improperly formatted")
    return preupload_info


@validate_hf_hub_args
def _fetch_upload_modes(
    additions: Iterable[CommitOperationAdd],
    repo_type: str,
    repo_id: str,
    headers: Dict[str, str],
    revision: str,
    endpoint: Optional[str] = None,
    create_pr: bool = False,
    gitignore_content: Optional[str] = None,
) -> None:
    """
    Requests the Hub "preupload" endpoint to determine whether each input file should be uploaded as a regular git
    blob or as a git LFS blob. Input `additions` are mutated in-place with the upload mode.

    Args:
        additions (`Iterable` of :class:`CommitOperationAdd`):
            Iterable of :class:`CommitOperationAdd` describing the files to
            upload to the Hub.
        repo_type (`str`):
            Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`.
        repo_id (`str`):
            A namespace (user or an organization) and a repo name separated
            by a `/`.
        headers (`Dict[str, str]`):
            Headers to use for the request, including authorization headers and user agent.
        revision (`str`):
            The git revision to upload the files to. Can be any valid git revision.
        gitignore_content (`str`, *optional*):
            The content of the `.gitignore` file to know which files should be ignored. The order of priority
            is to first check if `gitignore_content` is passed, then check if the `.gitignore` file is present
            in the list of files to commit and finally default to the `.gitignore` file already hosted on the Hub
            (if any).

    Raises:
        [`~utils.HfHubHTTPError`]
            If the Hub API returned an error.
        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
            If the Hub API response is improperly formatted.
    """
    endpoint = endpoint if endpoint is not None else constants.ENDPOINT

    # Fetch upload mode (LFS or regular) chunk by chunk.
    upload_modes: Dict[str, UploadMode] = {}
    should_ignore_info: Dict[str, bool] = {}
    oid_info: Dict[str, Optional[str]] = {}

    for chunk in chunk_iterable(additions, 256):
        payload: Dict = {
            "files": [
                {
                    "path": op.path_in_repo,
                    "sample": base64.b64encode(op.upload_info.sample).decode("ascii"),
                    "size": op.upload_info.size,
                }
                for op in chunk
            ]
        }
        if gitignore_content is not None:
            payload["gitIgnore"] = gitignore_content

        resp = get_session().post(
            f"{endpoint}/api/{repo_type}s/{repo_id}/preupload/{revision}",
            json=payload,
            headers=headers,
            params={"create_pr": "1"} if create_pr else None,
        )
        hf_raise_for_status(resp)
        preupload_info = _validate_preupload_info(resp.json())
        upload_modes.update(**{file["path"]: file["uploadMode"] for file in preupload_info["files"]})
        should_ignore_info.update(**{file["path"]: file["shouldIgnore"] for file in preupload_info["files"]})
        oid_info.update(**{file["path"]: file.get("oid") for file in preupload_info["files"]})

    # Set upload mode for each addition operation
    for addition in additions:
        addition._upload_mode = upload_modes[addition.path_in_repo]
        addition._should_ignore = should_ignore_info[addition.path_in_repo]
        addition._remote_oid = oid_info[addition.path_in_repo]

    # Empty files cannot be uploaded as LFS (S3 would fail with a 501 Not Implemented)
    # => empty files are uploaded as "regular" to still allow users to commit them.
    for addition in additions:
        if addition.upload_info.size == 0:
            addition._upload_mode = "regular"

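# Illustrative sketch (not part of the original file): the preupload exchange
# implemented above, with made-up values. The request sends a base64-encoded
# sample of each file; the response reports, per path, the upload mode, whether
# the file is gitignored, and the remote oid if the path already exists.
_example_preupload_request = {
    "files": [{"path": "model.safetensors", "sample": "c2FtcGxl", "size": 123456}]
}
_example_preupload_response = {
    "files": [{"path": "model.safetensors", "uploadMode": "lfs", "shouldIgnore": False, "oid": None}]
}
assert _validate_preupload_info(_example_preupload_response)["files"][0]["uploadMode"] == "lfs"
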
@validate_hf_hub_args
def _fetch_files_to_copy(
    copies: Iterable[CommitOperationCopy],
    repo_type: str,
    repo_id: str,
    headers: Dict[str, str],
    revision: str,
    endpoint: Optional[str] = None,
) -> Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]]:
    """
    Fetch information about the files to copy.

    For LFS files, we only need their metadata (file size and sha256) while for regular files
    we need to download the raw content from the Hub.

    Args:
        copies (`Iterable` of :class:`CommitOperationCopy`):
            Iterable of :class:`CommitOperationCopy` describing the files to
            copy on the Hub.
        repo_type (`str`):
            Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`.
        repo_id (`str`):
            A namespace (user or an organization) and a repo name separated
            by a `/`.
        headers (`Dict[str, str]`):
            Headers to use for the request, including authorization headers and user agent.
        revision (`str`):
            The git revision to upload the files to. Can be any valid git revision.

    Returns: `Dict[Tuple[str, Optional[str]], Union[RepoFile, bytes]]`
        Key is the file path and revision of the file to copy.
        Value is the raw content as bytes (for regular files) or the file information as a RepoFile (for LFS files).

    Raises:
        [`~utils.HfHubHTTPError`]
            If the Hub API returned an error.
        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
            If the Hub API response is improperly formatted.
    """
    from .hf_api import HfApi, RepoFolder

    hf_api = HfApi(endpoint=endpoint, headers=headers)
    files_to_copy: Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]] = {}
    # Store (path, revision) -> oid mapping
    oid_info: Dict[Tuple[str, Optional[str]], Optional[str]] = {}
    # 1. Fetch OIDs for destination paths in batches.
    dest_paths = [op.path_in_repo for op in copies]
    for offset in range(0, len(dest_paths), FETCH_LFS_BATCH_SIZE):
        dest_repo_files = hf_api.get_paths_info(
            repo_id=repo_id,
            paths=dest_paths[offset : offset + FETCH_LFS_BATCH_SIZE],
            revision=revision,
            repo_type=repo_type,
        )
        for file in dest_repo_files:
            if not isinstance(file, RepoFolder):
                oid_info[(file.path, revision)] = file.blob_id

    # 2. Group by source revision and fetch source file info in batches.
    for src_revision, operations in groupby(copies, key=lambda op: op.src_revision):
        operations = list(operations)  # type: ignore
        src_paths = [op.src_path_in_repo for op in operations]
        for offset in range(0, len(src_paths), FETCH_LFS_BATCH_SIZE):
            src_repo_files = hf_api.get_paths_info(
                repo_id=repo_id,
                paths=src_paths[offset : offset + FETCH_LFS_BATCH_SIZE],
                revision=src_revision or revision,
                repo_type=repo_type,
            )

            for src_repo_file in src_repo_files:
                if isinstance(src_repo_file, RepoFolder):
                    raise NotImplementedError("Copying a folder is not implemented.")
                oid_info[(src_repo_file.path, src_revision)] = src_repo_file.blob_id
                # If it's an LFS file, store the RepoFile object. Otherwise, download raw bytes.
                if src_repo_file.lfs:
                    files_to_copy[(src_repo_file.path, src_revision)] = src_repo_file
                else:
                    # TODO: (optimization) download regular files to copy concurrently
                    url = hf_hub_url(
                        endpoint=endpoint,
                        repo_type=repo_type,
                        repo_id=repo_id,
                        revision=src_revision or revision,
                        filename=src_repo_file.path,
                    )
                    response = get_session().get(url, headers=headers)
                    hf_raise_for_status(response)
                    files_to_copy[(src_repo_file.path, src_revision)] = response.content
        # 3. Ensure all operations found a corresponding file on the Hub
        #    and track src/dest OIDs for each operation.
        for operation in operations:
            if (operation.src_path_in_repo, src_revision) not in files_to_copy:
                raise EntryNotFoundError(
                    f"Cannot copy {operation.src_path_in_repo} at revision "
                    f"{src_revision or revision}: file is missing on repo."
                )
            operation._src_oid = oid_info.get((operation.src_path_in_repo, operation.src_revision))
            operation._dest_oid = oid_info.get((operation.path_in_repo, revision))
    return files_to_copy

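# Illustrative sketch (not part of the original file): shape of the mapping
# returned by `_fetch_files_to_copy`, keyed by (source path, source revision).
# Regular files map to their raw bytes, while LFS files map to a `RepoFile`
# carrying the sha256/size metadata needed to re-reference the blob without
# re-downloading it. Values below are made up.
_example_files_to_copy = {
    ("README.md", None): b"# My model\n",  # regular file -> raw content
    # ("weights.bin", "refs/pr/1"): RepoFile(...),  # LFS file -> metadata object
}
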
def _prepare_commit_payload(
    operations: Iterable[CommitOperation],
    files_to_copy: Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]],
    commit_message: str,
    commit_description: Optional[str] = None,
    parent_commit: Optional[str] = None,
) -> Iterable[Dict[str, Any]]:
    """
    Builds the payload to POST to the `/commit` API of the Hub.

    Payload is returned as an iterator so that it can be streamed as ndjson in the
    POST request.

    For more information, see:
        - https://github.com/huggingface/huggingface_hub/issues/1085#issuecomment-1265208073
        - http://ndjson.org/
    """
    commit_description = commit_description if commit_description is not None else ""

    # 1. Send a header item with the commit metadata
    header_value = {"summary": commit_message, "description": commit_description}
    if parent_commit is not None:
        header_value["parentCommit"] = parent_commit
    yield {"key": "header", "value": header_value}

    nb_ignored_files = 0

    # 2. Send operations, one per line
    for operation in operations:
        # Skip ignored files
        if isinstance(operation, CommitOperationAdd) and operation._should_ignore:
            logger.debug(f"Skipping file '{operation.path_in_repo}' in commit (ignored by gitignore file).")
            nb_ignored_files += 1
            continue

        # 2.a. Case adding a regular file
        if isinstance(operation, CommitOperationAdd) and operation._upload_mode == "regular":
            yield {
                "key": "file",
                "value": {
                    "content": operation.b64content().decode(),
                    "path": operation.path_in_repo,
                    "encoding": "base64",
                },
            }
        # 2.b. Case adding an LFS file
        elif isinstance(operation, CommitOperationAdd) and operation._upload_mode == "lfs":
            yield {
                "key": "lfsFile",
                "value": {
                    "path": operation.path_in_repo,
                    "algo": "sha256",
                    "oid": operation.upload_info.sha256.hex(),
                    "size": operation.upload_info.size,
                },
            }
        # 2.c. Case deleting a file or folder
        elif isinstance(operation, CommitOperationDelete):
            yield {
                "key": "deletedFolder" if operation.is_folder else "deletedFile",
                "value": {"path": operation.path_in_repo},
            }
        # 2.d. Case copying a file or folder
        elif isinstance(operation, CommitOperationCopy):
            file_to_copy = files_to_copy[(operation.src_path_in_repo, operation.src_revision)]
            if isinstance(file_to_copy, bytes):
                yield {
                    "key": "file",
                    "value": {
                        "content": base64.b64encode(file_to_copy).decode(),
                        "path": operation.path_in_repo,
                        "encoding": "base64",
                    },
                }
            elif file_to_copy.lfs:
                yield {
                    "key": "lfsFile",
                    "value": {
                        "path": operation.path_in_repo,
                        "algo": "sha256",
                        "oid": file_to_copy.lfs.sha256,
                    },
                }
            else:
                raise ValueError(
                    "Malformed files_to_copy (should be raw file content as bytes or RepoFile objects with LFS info)."
                )
        # 2.e. Never expected to happen
        else:
            raise ValueError(
                f"Unknown operation to commit. Operation: {operation}. Upload mode:"
                f" {getattr(operation, '_upload_mode', None)}"
            )

    if nb_ignored_files > 0:
        logger.info(f"Skipped {nb_ignored_files} file(s) in commit (ignored by gitignore file).")
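# Illustrative sketch (not part of the original file): how the payload iterator
# above can be turned into an ndjson request body, one JSON object per line
# (the actual `create_commit` wiring may differ).
import json


def _payload_as_ndjson(payload) -> bytes:
    return b"".join(json.dumps(item).encode("utf-8") + b"\n" for item in payload)
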
parrot/lib/python3.10/site-packages/huggingface_hub/_commit_scheduler.py
ADDED
@@ -0,0 +1,353 @@
import atexit
import logging
import os
import time
from concurrent.futures import Future
from dataclasses import dataclass
from io import SEEK_END, SEEK_SET, BytesIO
from pathlib import Path
from threading import Lock, Thread
from typing import Dict, List, Optional, Union

from .hf_api import DEFAULT_IGNORE_PATTERNS, CommitInfo, CommitOperationAdd, HfApi
from .utils import filter_repo_objects


logger = logging.getLogger(__name__)


@dataclass(frozen=True)
class _FileToUpload:
    """Temporary dataclass to store info about files to upload. Not meant to be used directly."""

    local_path: Path
    path_in_repo: str
    size_limit: int
    last_modified: float


class CommitScheduler:
    """
    Scheduler to upload a local folder to the Hub at regular intervals (e.g. push to hub every 5 minutes).

    The recommended way to use the scheduler is as a context manager. This ensures that the scheduler is
    properly stopped and the last commit is triggered when the script ends. The scheduler can also be stopped manually
    with the `stop` method. Check out the [upload guide](https://huggingface.co/docs/huggingface_hub/guides/upload#scheduled-uploads)
    to learn more about how to use it.

    Args:
        repo_id (`str`):
            The id of the repo to commit to.
        folder_path (`str` or `Path`):
            Path to the local folder to upload regularly.
        every (`int` or `float`, *optional*):
            The number of minutes between each commit. Defaults to 5 minutes.
        path_in_repo (`str`, *optional*):
            Relative path of the directory in the repo, for example: `"checkpoints/"`. Defaults to the root folder
            of the repository.
        repo_type (`str`, *optional*):
            The type of the repo to commit to. Defaults to `model`.
        revision (`str`, *optional*):
            The revision of the repo to commit to. Defaults to `main`.
        private (`bool`, *optional*):
            Whether to make the repo private. If `None` (default), the repo will be public unless the organization's
            default is private. This value is ignored if the repo already exists.
        token (`str`, *optional*):
            The token to use to commit to the repo. Defaults to the token saved on the machine.
        allow_patterns (`List[str]` or `str`, *optional*):
            If provided, only files matching at least one pattern are uploaded.
        ignore_patterns (`List[str]` or `str`, *optional*):
            If provided, files matching any of the patterns are not uploaded.
        squash_history (`bool`, *optional*):
            Whether to squash the history of the repo after each commit. Defaults to `False`. Squashing commits is
            useful to avoid degraded performance on the repo when it grows too large.
        hf_api (`HfApi`, *optional*):
            The [`HfApi`] client to use to commit to the Hub. Can be set with custom settings (user agent, token,...).

    Example:
    ```py
    >>> from pathlib import Path
    >>> from huggingface_hub import CommitScheduler

    # Scheduler uploads every 10 minutes
    >>> csv_path = Path("watched_folder/data.csv")
    >>> CommitScheduler(repo_id="test_scheduler", repo_type="dataset", folder_path=csv_path.parent, every=10)

    >>> with csv_path.open("a") as f:
    ...     f.write("first line")

    # Some time later (...)
    >>> with csv_path.open("a") as f:
    ...     f.write("second line")
    ```

    Example using a context manager:
    ```py
    >>> from pathlib import Path
    >>> from huggingface_hub import CommitScheduler

    >>> with CommitScheduler(repo_id="test_scheduler", repo_type="dataset", folder_path="watched_folder", every=10) as scheduler:
    ...     csv_path = Path("watched_folder/data.csv")
    ...     with csv_path.open("a") as f:
    ...         f.write("first line")
    ...     (...)
    ...     with csv_path.open("a") as f:
    ...         f.write("second line")

    # Scheduler is now stopped and the last commit has been triggered
    ```
    """

    def __init__(
        self,
        *,
        repo_id: str,
        folder_path: Union[str, Path],
        every: Union[int, float] = 5,
        path_in_repo: Optional[str] = None,
        repo_type: Optional[str] = None,
        revision: Optional[str] = None,
        private: Optional[bool] = None,
        token: Optional[str] = None,
        allow_patterns: Optional[Union[List[str], str]] = None,
        ignore_patterns: Optional[Union[List[str], str]] = None,
        squash_history: bool = False,
        hf_api: Optional["HfApi"] = None,
    ) -> None:
        self.api = hf_api or HfApi(token=token)

        # Folder
        self.folder_path = Path(folder_path).expanduser().resolve()
        self.path_in_repo = path_in_repo or ""
        self.allow_patterns = allow_patterns

        if ignore_patterns is None:
            ignore_patterns = []
        elif isinstance(ignore_patterns, str):
            ignore_patterns = [ignore_patterns]
        self.ignore_patterns = ignore_patterns + DEFAULT_IGNORE_PATTERNS

        if self.folder_path.is_file():
            raise ValueError(f"'folder_path' must be a directory, not a file: '{self.folder_path}'.")
        self.folder_path.mkdir(parents=True, exist_ok=True)

        # Repository
        repo_url = self.api.create_repo(repo_id=repo_id, private=private, repo_type=repo_type, exist_ok=True)
        self.repo_id = repo_url.repo_id
        self.repo_type = repo_type
        self.revision = revision
        self.token = token

        # Keep track of already uploaded files
        self.last_uploaded: Dict[Path, float] = {}  # key is local path, value is timestamp

        # Scheduler
        if not every > 0:
            raise ValueError(f"'every' must be a positive number, not '{every}'.")
        self.lock = Lock()
        self.every = every
        self.squash_history = squash_history

        logger.info(f"Scheduled job to push '{self.folder_path}' to '{self.repo_id}' every {self.every} minutes.")
        self._scheduler_thread = Thread(target=self._run_scheduler, daemon=True)
        self._scheduler_thread.start()
        atexit.register(self._push_to_hub)

        self.__stopped = False

    def stop(self) -> None:
        """Stop the scheduler.

        A stopped scheduler cannot be restarted. Mostly for testing purposes.
        """
        self.__stopped = True

    def __enter__(self) -> "CommitScheduler":
        return self

    def __exit__(self, exc_type, exc_value, traceback) -> None:
        # Upload last changes before exiting
        self.trigger().result()
        self.stop()
        return

    def _run_scheduler(self) -> None:
        """Dumb thread waiting between each scheduled push to the Hub."""
        while True:
            self.last_future = self.trigger()
            time.sleep(self.every * 60)
            if self.__stopped:
                break

    def trigger(self) -> Future:
        """Trigger a `push_to_hub` and return a future.

        This method is automatically called every `every` minutes. You can also call it manually to trigger a commit
        immediately, without waiting for the next scheduled commit.
        """
        return self.api.run_as_future(self._push_to_hub)

    def _push_to_hub(self) -> Optional[CommitInfo]:
        if self.__stopped:  # If stopped, already scheduled commits are ignored
            return None

        logger.info("(Background) scheduled commit triggered.")
        try:
            value = self.push_to_hub()
            if self.squash_history:
                logger.info("(Background) squashing repo history.")
                self.api.super_squash_history(repo_id=self.repo_id, repo_type=self.repo_type, branch=self.revision)
            return value
        except Exception as e:
            logger.error(f"Error while pushing to Hub: {e}")  # Depending on the setup, error might be silenced
            raise

    def push_to_hub(self) -> Optional[CommitInfo]:
        """
        Push folder to the Hub and return the commit info.

        <Tip warning={true}>

        This method is not meant to be called directly. It is run in the background by the scheduler, respecting a
        queue mechanism to avoid concurrent commits. Making a direct call to the method might lead to concurrency
        issues.

        </Tip>

        The default behavior of `push_to_hub` is to assume an append-only folder. It lists all files in the folder
        and uploads only changed files. If no changes are found, the method returns without committing anything. If
        you want to change this behavior, you can inherit from [`CommitScheduler`] and override this method. This can
        be useful for example to compress data together in a single file before committing. For more details and
        examples, check out our [integration guide](https://huggingface.co/docs/huggingface_hub/main/en/guides/upload#scheduled-uploads).
        """
        # Check files to upload (with lock)
        with self.lock:
            logger.debug("Listing files to upload for scheduled commit.")

            # List files from folder (taken from `_prepare_upload_folder_additions`)
            relpath_to_abspath = {
                path.relative_to(self.folder_path).as_posix(): path
                for path in sorted(self.folder_path.glob("**/*"))  # sorted to be deterministic
                if path.is_file()
            }
            prefix = f"{self.path_in_repo.strip('/')}/" if self.path_in_repo else ""

            # Filter with patterns + filter out unchanged files + retrieve current file size
            files_to_upload: List[_FileToUpload] = []
            for relpath in filter_repo_objects(
                relpath_to_abspath.keys(), allow_patterns=self.allow_patterns, ignore_patterns=self.ignore_patterns
            ):
                local_path = relpath_to_abspath[relpath]
                stat = local_path.stat()
                if self.last_uploaded.get(local_path) is None or self.last_uploaded[local_path] != stat.st_mtime:
                    files_to_upload.append(
                        _FileToUpload(
                            local_path=local_path,
                            path_in_repo=prefix + relpath,
                            size_limit=stat.st_size,
                            last_modified=stat.st_mtime,
                        )
                    )

        # Return if nothing to upload
        if len(files_to_upload) == 0:
            logger.debug("Dropping scheduled commit: no changed file to upload.")
            return None

        # Convert `_FileToUpload` to `CommitOperationAdd` (=> compute file shas + limit to file size)
        logger.debug("Removing unchanged files since previous scheduled commit.")
        add_operations = [
            CommitOperationAdd(
                # Cap the file at its current size, even if the user appends data to it while a scheduled commit is happening
                path_or_fileobj=PartialFileIO(file_to_upload.local_path, size_limit=file_to_upload.size_limit),
                path_in_repo=file_to_upload.path_in_repo,
            )
            for file_to_upload in files_to_upload
        ]

        # Upload files (append mode expected - no need for lock)
        logger.debug("Uploading files for scheduled commit.")
        commit_info = self.api.create_commit(
            repo_id=self.repo_id,
            repo_type=self.repo_type,
            operations=add_operations,
            commit_message="Scheduled Commit",
            revision=self.revision,
        )

        # Successful commit: keep track of the latest "last_modified" for each file
        for file in files_to_upload:
            self.last_uploaded[file.local_path] = file.last_modified
        return commit_info

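# Illustrative sketch (not part of the original file): as suggested in the
# `push_to_hub` docstring, the scheduler can be subclassed to customize what is
# committed. This hypothetical variant archives the whole folder into a single
# zip file before each scheduled commit.
import shutil
import tempfile


class ZipCommitScheduler(CommitScheduler):
    def push_to_hub(self) -> Optional[CommitInfo]:
        with self.lock, tempfile.TemporaryDirectory() as tmpdir:
            # Snapshot the watched folder as one archive, then upload it
            archive = shutil.make_archive(str(Path(tmpdir) / "data"), "zip", str(self.folder_path))
            return self.api.upload_file(
                path_or_fileobj=archive,
                path_in_repo="data.zip",
                repo_id=self.repo_id,
                repo_type=self.repo_type,
                revision=self.revision,
            )
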
class PartialFileIO(BytesIO):
    """A file-like object that reads only the first part of a file.

    Useful to upload a file to the Hub when the user might still be appending data to it. Only the first part of the
    file is uploaded (i.e. the part that was available when the filesystem was first scanned).

    In practice, only used internally by the CommitScheduler to regularly push a folder to the Hub with minimal
    disturbance for the user. The object is passed to `CommitOperationAdd`.

    Only supports `read`, `tell` and `seek` methods.

    Args:
        file_path (`str` or `Path`):
            Path to the file to read.
        size_limit (`int`):
            The maximum number of bytes to read from the file. If the file is larger than this, only the first part
            will be read (and uploaded).
    """

    def __init__(self, file_path: Union[str, Path], size_limit: int) -> None:
        self._file_path = Path(file_path)
        self._file = self._file_path.open("rb")
        self._size_limit = min(size_limit, os.fstat(self._file.fileno()).st_size)

    def __del__(self) -> None:
        self._file.close()
        return super().__del__()

    def __repr__(self) -> str:
        return f"<PartialFileIO file_path={self._file_path} size_limit={self._size_limit}>"

    def __len__(self) -> int:
        return self._size_limit

    def __getattribute__(self, name: str):
        if name.startswith("_") or name in ("read", "tell", "seek"):  # only 3 public methods supported
            return super().__getattribute__(name)
        raise NotImplementedError(f"PartialFileIO does not support '{name}'.")

    def tell(self) -> int:
        """Return the current file position."""
        return self._file.tell()

    def seek(self, __offset: int, __whence: int = SEEK_SET) -> int:
        """Change the stream position to the given offset.

        Behavior is the same as a regular file, except that the position is capped to the size limit.
        """
        if __whence == SEEK_END:
            # SEEK_END => set from the truncated end
            __offset = len(self) + __offset
            __whence = SEEK_SET

        pos = self._file.seek(__offset, __whence)
        if pos > self._size_limit:
            return self._file.seek(self._size_limit)
        return pos

    def read(self, __size: Optional[int] = -1) -> bytes:
        """Read at most `__size` bytes from the file.

        Behavior is the same as a regular file, except that it is capped to the size limit.
        """
        current = self._file.tell()
        if __size is None or __size < 0:
            # Read until file limit
            truncated_size = self._size_limit - current
        else:
            # Read until file limit or __size
            truncated_size = min(__size, self._size_limit - current)
        return self._file.read(truncated_size)
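# Illustrative sketch (not part of the original file): PartialFileIO caps reads
# at the size recorded when the folder was scanned, so bytes appended to the
# file afterwards are not included in the scheduled commit.
def _demo_partial_file_io(path: Path) -> None:
    path.write_text("hello")  # 5 bytes at scan time
    partial = PartialFileIO(path, size_limit=5)
    with path.open("a") as f:
        f.write(" world")  # appended after the scan
    assert partial.read() == b"hello"  # read is capped at the first 5 bytes
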
parrot/lib/python3.10/site-packages/huggingface_hub/_inference_endpoints.py
ADDED
@@ -0,0 +1,407 @@
import time
from dataclasses import dataclass, field
from datetime import datetime
from enum import Enum
from typing import TYPE_CHECKING, Dict, Optional, Union

from huggingface_hub.errors import InferenceEndpointError, InferenceEndpointTimeoutError

from .inference._client import InferenceClient
from .inference._generated._async_client import AsyncInferenceClient
from .utils import get_session, logging, parse_datetime


if TYPE_CHECKING:
    from .hf_api import HfApi


logger = logging.get_logger(__name__)


class InferenceEndpointStatus(str, Enum):
    PENDING = "pending"
    INITIALIZING = "initializing"
    UPDATING = "updating"
    UPDATE_FAILED = "updateFailed"
    RUNNING = "running"
    PAUSED = "paused"
    FAILED = "failed"
    SCALED_TO_ZERO = "scaledToZero"


class InferenceEndpointType(str, Enum):
    PUBlIC = "public"
    PROTECTED = "protected"
    PRIVATE = "private"


@dataclass
class InferenceEndpoint:
    """
    Contains information about a deployed Inference Endpoint.

    Args:
        name (`str`):
            The unique name of the Inference Endpoint.
        namespace (`str`):
            The namespace where the Inference Endpoint is located.
        repository (`str`):
            The name of the model repository deployed on this Inference Endpoint.
        status ([`InferenceEndpointStatus`]):
            The current status of the Inference Endpoint.
        url (`str`, *optional*):
            The URL of the Inference Endpoint, if available. Only a deployed Inference Endpoint will have a URL.
        framework (`str`):
            The machine learning framework used for the model.
        revision (`str`):
            The specific model revision deployed on the Inference Endpoint.
        task (`str`):
            The task associated with the deployed model.
        created_at (`datetime.datetime`):
            The timestamp when the Inference Endpoint was created.
        updated_at (`datetime.datetime`):
            The timestamp of the last update of the Inference Endpoint.
        type ([`InferenceEndpointType`]):
            The type of the Inference Endpoint (public, protected, private).
        raw (`Dict`):
            The raw dictionary data returned from the API.
        token (`str` or `bool`, *optional*):
            Authentication token for the Inference Endpoint, if set when requesting the API. Will default to the
            locally saved token if not provided. Pass `token=False` if you don't want to send your token to the server.

    Example:
    ```python
    >>> from huggingface_hub import get_inference_endpoint
    >>> endpoint = get_inference_endpoint("my-text-to-image")
    >>> endpoint
    InferenceEndpoint(name='my-text-to-image', ...)

    # Get status
    >>> endpoint.status
    'running'
    >>> endpoint.url
    'https://my-text-to-image.region.vendor.endpoints.huggingface.cloud'

    # Run inference
    >>> endpoint.client.text_to_image(...)

    # Pause endpoint to save $$$
    >>> endpoint.pause()

    # ...
    # Resume and wait for deployment
    >>> endpoint.resume()
    >>> endpoint.wait()
    >>> endpoint.client.text_to_image(...)
    ```
    """

    # Fields in __repr__
    name: str = field(init=False)
    namespace: str
    repository: str = field(init=False)
    status: InferenceEndpointStatus = field(init=False)
    url: Optional[str] = field(init=False)

    # Other fields
    framework: str = field(repr=False, init=False)
    revision: str = field(repr=False, init=False)
    task: str = field(repr=False, init=False)
    created_at: datetime = field(repr=False, init=False)
    updated_at: datetime = field(repr=False, init=False)
    type: InferenceEndpointType = field(repr=False, init=False)

    # Raw dict from the API
    raw: Dict = field(repr=False)

    # Internal fields
    _token: Union[str, bool, None] = field(repr=False, compare=False)
    _api: "HfApi" = field(repr=False, compare=False)

    @classmethod
    def from_raw(
        cls, raw: Dict, namespace: str, token: Union[str, bool, None] = None, api: Optional["HfApi"] = None
    ) -> "InferenceEndpoint":
        """Initialize object from raw dictionary."""
        if api is None:
            from .hf_api import HfApi

            api = HfApi()
        if token is None:
            token = api.token

        # All other fields are populated in __post_init__
        return cls(raw=raw, namespace=namespace, _token=token, _api=api)

    def __post_init__(self) -> None:
        """Populate fields from raw dictionary."""
        self._populate_from_raw()

    @property
    def client(self) -> InferenceClient:
        """Returns a client to make predictions on this Inference Endpoint.

        Returns:
            [`InferenceClient`]: an inference client pointing to the deployed endpoint.

        Raises:
            [`InferenceEndpointError`]: If the Inference Endpoint is not yet deployed.
        """
        if self.url is None:
            raise InferenceEndpointError(
                "Cannot create a client for this Inference Endpoint as it is not yet deployed. "
                "Please wait for the Inference Endpoint to be deployed using `endpoint.wait()` and try again."
            )
        return InferenceClient(
            model=self.url,
            token=self._token,  # type: ignore[arg-type] # boolean token shouldn't be possible. In practice it's ok.
        )

    @property
    def async_client(self) -> AsyncInferenceClient:
        """Returns an async client to make predictions on this Inference Endpoint.

        Returns:
            [`AsyncInferenceClient`]: an asyncio-compatible inference client pointing to the deployed endpoint.

        Raises:
            [`InferenceEndpointError`]: If the Inference Endpoint is not yet deployed.
        """
        if self.url is None:
            raise InferenceEndpointError(
                "Cannot create a client for this Inference Endpoint as it is not yet deployed. "
                "Please wait for the Inference Endpoint to be deployed using `endpoint.wait()` and try again."
            )
        return AsyncInferenceClient(
            model=self.url,
            token=self._token,  # type: ignore[arg-type] # boolean token shouldn't be possible. In practice it's ok.
        )

    def wait(self, timeout: Optional[int] = None, refresh_every: int = 5) -> "InferenceEndpoint":
        """Wait for the Inference Endpoint to be deployed.

        Information from the server is fetched every `refresh_every` seconds. If the Inference Endpoint is not
        deployed after `timeout` seconds, an [`InferenceEndpointTimeoutError`] is raised. The [`InferenceEndpoint`]
        is mutated in place with the latest data.

        Args:
            timeout (`int`, *optional*):
                The maximum time to wait for the Inference Endpoint to be deployed, in seconds. If `None`, will wait
                indefinitely.
            refresh_every (`int`, *optional*):
                The time to wait between each fetch of the Inference Endpoint status, in seconds. Defaults to 5s.

        Returns:
            [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data.

        Raises:
            [`InferenceEndpointError`]
                If the Inference Endpoint ended up in a failed state.
            [`InferenceEndpointTimeoutError`]
                If the Inference Endpoint is not deployed after `timeout` seconds.
        """
        if timeout is not None and timeout < 0:
            raise ValueError("`timeout` cannot be negative.")
        if refresh_every <= 0:
            raise ValueError("`refresh_every` must be positive.")

        start = time.time()
        while True:
            if self.status == InferenceEndpointStatus.FAILED:
                raise InferenceEndpointError(
                    f"Inference Endpoint {self.name} failed to deploy. Please check the logs for more information."
                )
            if self.status == InferenceEndpointStatus.UPDATE_FAILED:
                raise InferenceEndpointError(
                    f"Inference Endpoint {self.name} failed to update. Please check the logs for more information."
                )
            if self.status == InferenceEndpointStatus.RUNNING and self.url is not None:
                # Verify the endpoint is actually reachable
                response = get_session().get(self.url, headers=self._api._build_hf_headers(token=self._token))
                if response.status_code == 200:
                    logger.info("Inference Endpoint is ready to be used.")
                    return self

            if timeout is not None:
                if time.time() - start > timeout:
                    raise InferenceEndpointTimeoutError("Timeout while waiting for Inference Endpoint to be deployed.")
            logger.info(f"Inference Endpoint is not deployed yet ({self.status}). Waiting {refresh_every}s...")
            time.sleep(refresh_every)
            self.fetch()

    def fetch(self) -> "InferenceEndpoint":
        """Fetch latest information about the Inference Endpoint.

        Returns:
            [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data.
        """
        obj = self._api.get_inference_endpoint(name=self.name, namespace=self.namespace, token=self._token)  # type: ignore [arg-type]
        self.raw = obj.raw
        self._populate_from_raw()
        return self

    def update(
        self,
        *,
        # Compute update
        accelerator: Optional[str] = None,
        instance_size: Optional[str] = None,
        instance_type: Optional[str] = None,
        min_replica: Optional[int] = None,
        max_replica: Optional[int] = None,
        scale_to_zero_timeout: Optional[int] = None,
        # Model update
        repository: Optional[str] = None,
        framework: Optional[str] = None,
        revision: Optional[str] = None,
        task: Optional[str] = None,
        custom_image: Optional[Dict] = None,
        secrets: Optional[Dict[str, str]] = None,
    ) -> "InferenceEndpoint":
        """Update the Inference Endpoint.

        This method allows the update of either the compute configuration, the deployed model, or both. All arguments
        are optional but at least one must be provided.

        This is an alias for [`HfApi.update_inference_endpoint`]. The current object is mutated in place with the
        latest data from the server.

        Args:
            accelerator (`str`, *optional*):
                The hardware accelerator to be used for inference (e.g. `"cpu"`).
            instance_size (`str`, *optional*):
                The size or type of the instance to be used for hosting the model (e.g. `"x4"`).
            instance_type (`str`, *optional*):
                The cloud instance type where the Inference Endpoint will be deployed (e.g. `"intel-icl"`).
            min_replica (`int`, *optional*):
                The minimum number of replicas (instances) to keep running for the Inference Endpoint.
            max_replica (`int`, *optional*):
                The maximum number of replicas (instances) to scale to for the Inference Endpoint.
            scale_to_zero_timeout (`int`, *optional*):
                The duration in minutes before an inactive endpoint is scaled to zero.

            repository (`str`, *optional*):
                The name of the model repository associated with the Inference Endpoint (e.g. `"gpt2"`).
            framework (`str`, *optional*):
                The machine learning framework used for the model (e.g. `"custom"`).
            revision (`str`, *optional*):
                The specific model revision to deploy on the Inference Endpoint (e.g. `"6c0e6080953db56375760c0471a8c5f2929baf11"`).
            task (`str`, *optional*):
                The task on which to deploy the model (e.g. `"text-classification"`).
            custom_image (`Dict`, *optional*):
                A custom Docker image to use for the Inference Endpoint. This is useful if you want to deploy an
                Inference Endpoint running on the `text-generation-inference` (TGI) framework (see examples).
            secrets (`Dict[str, str]`, *optional*):
                Secret values to inject in the container environment.

        Returns:
            [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data.
        """
        # Make API call
        obj = self._api.update_inference_endpoint(
            name=self.name,
            namespace=self.namespace,
            accelerator=accelerator,
            instance_size=instance_size,
            instance_type=instance_type,
            min_replica=min_replica,
            max_replica=max_replica,
            scale_to_zero_timeout=scale_to_zero_timeout,
            repository=repository,
            framework=framework,
            revision=revision,
            task=task,
            custom_image=custom_image,
            secrets=secrets,
            token=self._token,  # type: ignore [arg-type]
        )

        # Mutate current object
        self.raw = obj.raw
        self._populate_from_raw()
        return self

    def pause(self) -> "InferenceEndpoint":
        """Pause the Inference Endpoint.

        A paused Inference Endpoint will not be charged. It can be resumed at any time using
        [`InferenceEndpoint.resume`]. This is different from scaling the Inference Endpoint to zero with
        [`InferenceEndpoint.scale_to_zero`], which is automatically restarted when a request is made to it.

        This is an alias for [`HfApi.pause_inference_endpoint`]. The current object is mutated in place with the
        latest data from the server.

        Returns:
            [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data.
        """
        obj = self._api.pause_inference_endpoint(name=self.name, namespace=self.namespace, token=self._token)  # type: ignore [arg-type]
        self.raw = obj.raw
        self._populate_from_raw()
        return self

    def resume(self, running_ok: bool = True) -> "InferenceEndpoint":
        """Resume the Inference Endpoint.

        This is an alias for [`HfApi.resume_inference_endpoint`]. The current object is mutated in place with the
        latest data from the server.

        Args:
            running_ok (`bool`, *optional*):
                If `True`, the method will not raise an error if the Inference Endpoint is already running. Defaults
                to `True`.

        Returns:
            [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data.
        """
        obj = self._api.resume_inference_endpoint(
            name=self.name, namespace=self.namespace, running_ok=running_ok, token=self._token
        )  # type: ignore [arg-type]
        self.raw = obj.raw
        self._populate_from_raw()
        return self

    def scale_to_zero(self) -> "InferenceEndpoint":
        """Scale the Inference Endpoint to zero.

        An Inference Endpoint scaled to zero will not be charged. It is resumed on the next request made to it, with
        a cold start delay. This is different from pausing the Inference Endpoint with [`InferenceEndpoint.pause`],
        which requires a manual resume with [`InferenceEndpoint.resume`].

        This is an alias for [`HfApi.scale_to_zero_inference_endpoint`]. The current object is mutated in place with
        the latest data from the server.

        Returns:
            [`InferenceEndpoint`]: the same Inference Endpoint, mutated in place with the latest data.
        """
        obj = self._api.scale_to_zero_inference_endpoint(name=self.name, namespace=self.namespace, token=self._token)  # type: ignore [arg-type]
        self.raw = obj.raw
        self._populate_from_raw()
        return self

    def delete(self) -> None:
        """Delete the Inference Endpoint.

        This operation is not reversible. If you don't want to be charged for an Inference Endpoint, it is preferable
        to pause it with [`InferenceEndpoint.pause`] or scale it to zero with [`InferenceEndpoint.scale_to_zero`].

        This is an alias for [`HfApi.delete_inference_endpoint`].
        """
        self._api.delete_inference_endpoint(name=self.name, namespace=self.namespace, token=self._token)  # type: ignore [arg-type]

    def _populate_from_raw(self) -> None:
        """Populate fields from raw dictionary.

        Called in __post_init__ + each time the Inference Endpoint is updated.
        """
        # Repr fields
        self.name = self.raw["name"]
        self.repository = self.raw["model"]["repository"]
        self.status = self.raw["status"]["state"]
        self.url = self.raw["status"].get("url")

        # Other fields
        self.framework = self.raw["model"]["framework"]
        self.revision = self.raw["model"]["revision"]
        self.task = self.raw["model"]["task"]
        self.created_at = parse_datetime(self.raw["status"]["createdAt"])
        self.updated_at = parse_datetime(self.raw["status"]["updatedAt"])
        self.type = self.raw["type"]
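# Illustrative sketch (not part of the original file): resuming a paused
# endpoint and bounding the wait. `text_generation` assumes a text-generation
# model is deployed on the endpoint.
def _demo_wait_with_timeout(endpoint: InferenceEndpoint) -> None:
    endpoint.resume()
    try:
        endpoint.wait(timeout=300, refresh_every=10)  # poll every 10s, give up after 5 minutes
    except InferenceEndpointTimeoutError:
        print("Endpoint did not come up in time.")
    else:
        print(endpoint.client.text_generation("Hello"))
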
parrot/lib/python3.10/site-packages/huggingface_hub/_local_folder.py
ADDED
@@ -0,0 +1,432 @@
# coding=utf-8
# Copyright 2024-present, the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains utilities to handle the `../.cache/huggingface` folder in local directories.

First discussed in https://github.com/huggingface/huggingface_hub/issues/1738 to store
download metadata when downloading files from the hub to a local directory (without
using the cache).

./.cache/huggingface folder structure:
[4.0K]  data
├── [4.0K]  .cache
│   └── [4.0K]  huggingface
│       └── [4.0K]  download
│           ├── [  16]  file.parquet.metadata
│           ├── [  16]  file.txt.metadata
│           └── [4.0K]  folder
│               └── [  16]  file.parquet.metadata
│
├── [6.5G]  file.parquet
├── [1.5K]  file.txt
└── [4.0K]  folder
    └── [  16]  file.parquet


Download metadata file structure:
```
# file.txt.metadata
11c5a3d5811f50298f278a704980280950aedb10
a16a55fda99d2f2e7b69cce5cf93ff4ad3049930
1712656091.123

# file.parquet.metadata
11c5a3d5811f50298f278a704980280950aedb10
7c5d3f4b8b76583b422fcb9189ad6c89d5d97a094541ce8932dce3ecabde1421
1712656091.123
```
"""

import base64
import hashlib
import logging
import os
import time
from dataclasses import dataclass
from pathlib import Path
from typing import Optional

from .utils import WeakFileLock


logger = logging.getLogger(__name__)


@dataclass
class LocalDownloadFilePaths:
    """
    Paths to the files related to a download process in a local dir.

    Returned by [`get_local_download_paths`].

    Attributes:
        file_path (`Path`):
            Path where the file will be saved.
        lock_path (`Path`):
            Path to the lock file used to ensure atomicity when reading/writing metadata.
        metadata_path (`Path`):
            Path to the metadata file.
    """

    file_path: Path
    lock_path: Path
    metadata_path: Path

    def incomplete_path(self, etag: str) -> Path:
        """Return the path where a file will be temporarily downloaded before being moved to `file_path`."""
        return self.metadata_path.parent / f"{_short_hash(self.metadata_path.name)}.{etag}.incomplete"


@dataclass(frozen=True)
class LocalUploadFilePaths:
    """
    Paths to the files related to an upload process in a local dir.

    Returned by [`get_local_upload_paths`].

    Attributes:
        path_in_repo (`str`):
            Path of the file in the repo.
        file_path (`Path`):
            Path where the file will be saved.
        lock_path (`Path`):
            Path to the lock file used to ensure atomicity when reading/writing metadata.
        metadata_path (`Path`):
            Path to the metadata file.
    """

    path_in_repo: str
    file_path: Path
    lock_path: Path
    metadata_path: Path


@dataclass
class LocalDownloadFileMetadata:
    """
    Metadata about a file in the local directory related to a download process.

    Attributes:
        filename (`str`):
            Path of the file in the repo.
        commit_hash (`str`):
            Commit hash of the file in the repo.
        etag (`str`):
            ETag of the file in the repo. Used to check if the file has changed.
            For LFS files, this is the sha256 of the file. For regular files, it corresponds to the git hash.
        timestamp (`float`):
            Unix timestamp of when the metadata was saved, i.e. when the metadata was accurate.
    """

    filename: str
    commit_hash: str
    etag: str
    timestamp: float


@dataclass
class LocalUploadFileMetadata:
    """
    Metadata about a file in the local directory related to an upload process.
    """

    size: int

    # Default values correspond to "we don't know yet"
    timestamp: Optional[float] = None
    should_ignore: Optional[bool] = None
    sha256: Optional[str] = None
    upload_mode: Optional[str] = None
    is_uploaded: bool = False
    is_committed: bool = False

    def save(self, paths: LocalUploadFilePaths) -> None:
        """Save the metadata to disk."""
        with WeakFileLock(paths.lock_path):
            with paths.metadata_path.open("w") as f:
                new_timestamp = time.time()
                f.write(str(new_timestamp) + "\n")

                f.write(str(self.size))  # never None
                f.write("\n")

                if self.should_ignore is not None:
                    f.write(str(int(self.should_ignore)))
                f.write("\n")

                if self.sha256 is not None:
                    f.write(self.sha256)
                f.write("\n")

                if self.upload_mode is not None:
                    f.write(self.upload_mode)
                f.write("\n")

                f.write(str(int(self.is_uploaded)) + "\n")
                f.write(str(int(self.is_committed)) + "\n")

            self.timestamp = new_timestamp


def get_local_download_paths(local_dir: Path, filename: str) -> LocalDownloadFilePaths:
    """Compute paths to the files related to a download process.

    Folders containing the paths are all guaranteed to exist.

    Args:
        local_dir (`Path`):
            Path to the local directory in which files are downloaded.
        filename (`str`):
            Path of the file in the repo.

    Return:
        [`LocalDownloadFilePaths`]: the paths to the files (file_path, lock_path, metadata_path, incomplete_path).
    """
    # filename is the path in the Hub repository (separated by '/')
    # make sure to have a cross platform transcription
    sanitized_filename = os.path.join(*filename.split("/"))
    if os.name == "nt":
        if sanitized_filename.startswith("..\\") or "\\..\\" in sanitized_filename:
            raise ValueError(
                f"Invalid filename: cannot handle filename '{sanitized_filename}' on Windows. Please ask the repository"
                " owner to rename this file."
            )
    file_path = local_dir / sanitized_filename
    metadata_path = _huggingface_dir(local_dir) / "download" / f"{sanitized_filename}.metadata"
    lock_path = metadata_path.with_suffix(".lock")

    # Some Windows versions do not allow for paths longer than 255 characters.
    # In this case, we must specify it as an extended path by using the "\\?\" prefix
    if os.name == "nt":
        if not str(local_dir).startswith("\\\\?\\") and len(os.path.abspath(lock_path)) > 255:
            file_path = Path("\\\\?\\" + os.path.abspath(file_path))
            lock_path = Path("\\\\?\\" + os.path.abspath(lock_path))
            metadata_path = Path("\\\\?\\" + os.path.abspath(metadata_path))

    file_path.parent.mkdir(parents=True, exist_ok=True)
    metadata_path.parent.mkdir(parents=True, exist_ok=True)
    return LocalDownloadFilePaths(file_path=file_path, lock_path=lock_path, metadata_path=metadata_path)


def get_local_upload_paths(local_dir: Path, filename: str) -> LocalUploadFilePaths:
    """Compute paths to the files related to an upload process.

    Folders containing the paths are all guaranteed to exist.

    Args:
        local_dir (`Path`):
            Path to the local directory that is uploaded.
        filename (`str`):
            Path of the file in the repo.

    Return:
        [`LocalUploadFilePaths`]: the paths to the files (file_path, lock_path, metadata_path).
    """
    # filename is the path in the Hub repository (separated by '/')
    # make sure to have a cross platform transcription
    sanitized_filename = os.path.join(*filename.split("/"))
    if os.name == "nt":
        if sanitized_filename.startswith("..\\") or "\\..\\" in sanitized_filename:
            raise ValueError(
                f"Invalid filename: cannot handle filename '{sanitized_filename}' on Windows. Please ask the repository"
                " owner to rename this file."
            )
    file_path = local_dir / sanitized_filename
    metadata_path = _huggingface_dir(local_dir) / "upload" / f"{sanitized_filename}.metadata"
    lock_path = metadata_path.with_suffix(".lock")

    # Some Windows versions do not allow for paths longer than 255 characters.
    # In this case, we must specify it as an extended path by using the "\\?\" prefix
    if os.name == "nt":
        if not str(local_dir).startswith("\\\\?\\") and len(os.path.abspath(lock_path)) > 255:
            file_path = Path("\\\\?\\" + os.path.abspath(file_path))
            lock_path = Path("\\\\?\\" + os.path.abspath(lock_path))
            metadata_path = Path("\\\\?\\" + os.path.abspath(metadata_path))

    file_path.parent.mkdir(parents=True, exist_ok=True)
    metadata_path.parent.mkdir(parents=True, exist_ok=True)
    return LocalUploadFilePaths(
        path_in_repo=filename, file_path=file_path, lock_path=lock_path, metadata_path=metadata_path
    )


def read_download_metadata(local_dir: Path, filename: str) -> Optional[LocalDownloadFileMetadata]:
    """Read metadata about a file in the local directory related to a download process.

    Args:
        local_dir (`Path`):
            Path to the local directory in which files are downloaded.
        filename (`str`):
            Path of the file in the repo.

    Return:
        `[LocalDownloadFileMetadata]` or `None`: the metadata if it exists, `None` otherwise.
    """
    paths = get_local_download_paths(local_dir, filename)
    with WeakFileLock(paths.lock_path):
        if paths.metadata_path.exists():
            try:
                with paths.metadata_path.open() as f:
                    commit_hash = f.readline().strip()
                    etag = f.readline().strip()
                    timestamp = float(f.readline().strip())
                    metadata = LocalDownloadFileMetadata(
                        filename=filename,
                        commit_hash=commit_hash,
                        etag=etag,
                        timestamp=timestamp,
                    )
            except Exception as e:
                # remove the metadata file if it is corrupted / not the right format
                logger.warning(
                    f"Invalid metadata file {paths.metadata_path}: {e}. Removing it from disk and continuing."
                )
                try:
                    paths.metadata_path.unlink()
                except Exception as e:
                    logger.warning(f"Could not remove corrupted metadata file {paths.metadata_path}: {e}")
                return None  # corrupted metadata => behave as if there was none

            try:
                # check if the file exists and hasn't been modified since the metadata was saved
                stat = paths.file_path.stat()
                if (
                    stat.st_mtime - 1 <= metadata.timestamp
                ):  # allow 1s difference as stat.st_mtime might not be precise
                    return metadata
                logger.info(f"Ignored metadata for '{filename}' (outdated). Will re-compute hash.")
            except FileNotFoundError:
                # file does not exist => metadata is outdated
                return None
    return None


def read_upload_metadata(local_dir: Path, filename: str) -> LocalUploadFileMetadata:
    """Read metadata about a file in the local directory related to an upload process.

    TODO: factorize logic with `read_download_metadata`.

    Args:
        local_dir (`Path`):
            Path to the local directory in which files are downloaded.
        filename (`str`):
            Path of the file in the repo.

    Return:
        `[LocalUploadFileMetadata]` or `None`: the metadata if it exists, `None` otherwise.
    """
    paths = get_local_upload_paths(local_dir, filename)
    with WeakFileLock(paths.lock_path):
        if paths.metadata_path.exists():
            try:
                with paths.metadata_path.open() as f:
                    timestamp = float(f.readline().strip())

                    size = int(f.readline().strip())  # never None

                    _should_ignore = f.readline().strip()
                    should_ignore = None if _should_ignore == "" else bool(int(_should_ignore))

                    _sha256 = f.readline().strip()
                    sha256 = None if _sha256 == "" else _sha256

                    _upload_mode = f.readline().strip()
                    upload_mode = None if _upload_mode == "" else _upload_mode
                    if upload_mode not in (None, "regular", "lfs"):
                        raise ValueError(f"Invalid upload mode in metadata {paths.path_in_repo}: {upload_mode}")

                    is_uploaded = bool(int(f.readline().strip()))
                    is_committed = bool(int(f.readline().strip()))

                    metadata = LocalUploadFileMetadata(
                        timestamp=timestamp,
                        size=size,
                        should_ignore=should_ignore,
                        sha256=sha256,
                        upload_mode=upload_mode,
                        is_uploaded=is_uploaded,
                        is_committed=is_committed,
                    )
            except Exception as e:
                # remove the metadata file if it is corrupted / not the right format
                logger.warning(
                    f"Invalid metadata file {paths.metadata_path}: {e}. Removing it from disk and continuing."
                )
                try:
                    paths.metadata_path.unlink()
                except Exception as e:
                    logger.warning(f"Could not remove corrupted metadata file {paths.metadata_path}: {e}")
            else:
                # TODO: can we do better?
                if (
                    metadata.timestamp is not None
                    and metadata.is_uploaded  # file was uploaded
                    and not metadata.is_committed  # but not committed
                    and time.time() - metadata.timestamp > 20 * 3600  # and it's been more than 20 hours
                ):  # => we consider it as garbage-collected by S3
                    metadata.is_uploaded = False

                # check if the file exists and hasn't been modified since the metadata was saved
                try:
                    if metadata.timestamp is not None and paths.file_path.stat().st_mtime <= metadata.timestamp:
                        return metadata
                    logger.info(f"Ignored metadata for '{filename}' (outdated). Will re-compute hash.")
                except FileNotFoundError:
                    # file does not exist => metadata is outdated
                    pass

    # empty metadata => we don't know anything except its size
    return LocalUploadFileMetadata(size=paths.file_path.stat().st_size)


def write_download_metadata(local_dir: Path, filename: str, commit_hash: str, etag: str) -> None:
    """Write metadata about a file in the local directory related to a download process.

    Args:
        local_dir (`Path`):
            Path to the local directory in which files are downloaded.
    """
    paths = get_local_download_paths(local_dir, filename)
    with WeakFileLock(paths.lock_path):
        with paths.metadata_path.open("w") as f:
            f.write(f"{commit_hash}\n{etag}\n{time.time()}\n")


def _huggingface_dir(local_dir: Path) -> Path:
    """Return the path to the `.cache/huggingface` directory in a local directory."""
    path = local_dir / ".cache" / "huggingface"
    path.mkdir(exist_ok=True, parents=True)

    # Create a .gitignore file in the .cache/huggingface directory if it doesn't exist
    # Should be thread-safe enough like this.
    gitignore = path / ".gitignore"
    gitignore_lock = path / ".gitignore.lock"
    if not gitignore.exists():
        try:
            with WeakFileLock(gitignore_lock, timeout=0.1):
                gitignore.write_text("*")
        except IndexError:
            pass
        except OSError:  # TimeoutError, FileNotFoundError, PermissionError, etc.
            pass
        try:
            gitignore_lock.unlink()
        except OSError:
            pass
    return path


def _short_hash(filename: str) -> str:
    return base64.urlsafe_b64encode(hashlib.sha1(filename.encode()).digest()).decode()
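As a quick illustration of the metadata format documented in the module docstring above, here is a minimal sketch (not part of the diff) of the write/read round trip. It assumes `huggingface_hub._local_folder` is importable as vendored here; note it is a private module, and the directory name and hashes below are placeholders borrowed from the docstring example.

```python
# Minimal sketch: round-trip the download metadata helpers defined above.
# `./data` and both hashes are placeholders; `_local_folder` is private API.
from pathlib import Path

from huggingface_hub._local_folder import read_download_metadata, write_download_metadata

local_dir = Path("./data")
local_dir.mkdir(exist_ok=True)
(local_dir / "file.txt").write_text("hello")  # stand-in for a freshly downloaded file

# Writes ./data/.cache/huggingface/download/file.txt.metadata
# (one line each: commit hash, etag, timestamp).
write_download_metadata(
    local_dir,
    filename="file.txt",
    commit_hash="11c5a3d5811f50298f278a704980280950aedb10",
    etag="a16a55fda99d2f2e7b69cce5cf93ff4ad3049930",
)

# Returns a LocalDownloadFileMetadata while file.txt is unmodified,
# and None once the file is touched or deleted.
metadata = read_download_metadata(local_dir, "file.txt")
print(metadata.etag if metadata is not None else "metadata outdated")
```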
parrot/lib/python3.10/site-packages/huggingface_hub/_login.py
ADDED
@@ -0,0 +1,520 @@
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains methods to log in to the Hub."""

import os
import subprocess
from getpass import getpass
from pathlib import Path
from typing import Optional

from . import constants
from .commands._cli_utils import ANSI
from .utils import (
    capture_output,
    get_token,
    is_google_colab,
    is_notebook,
    list_credential_helpers,
    logging,
    run_subprocess,
    set_git_credential,
    unset_git_credential,
)
from .utils._auth import (
    _get_token_by_name,
    _get_token_from_environment,
    _get_token_from_file,
    _get_token_from_google_colab,
    _save_stored_tokens,
    _save_token,
    get_stored_tokens,
)
from .utils._deprecation import _deprecate_arguments, _deprecate_positional_args


logger = logging.get_logger(__name__)

_HF_LOGO_ASCII = """
    _|    _|  _|    _|    _|_|_|    _|_|_|  _|_|_|  _|      _|    _|_|_|      _|_|_|_|    _|_|      _|_|_|  _|_|_|_|
    _|    _|  _|    _|  _|        _|          _|    _|_|    _|  _|            _|        _|    _|  _|        _|
    _|_|_|_|  _|    _|  _|  _|_|  _|  _|_|    _|    _|  _|  _|  _|  _|_|      _|_|_|    _|_|_|_|  _|        _|_|_|
    _|    _|  _|    _|  _|    _|  _|    _|    _|    _|    _|_|  _|    _|      _|        _|    _|  _|        _|
    _|    _|    _|_|      _|_|_|    _|_|_|  _|_|_|  _|      _|    _|_|_|      _|        _|    _|    _|_|_|  _|_|_|_|
"""


@_deprecate_arguments(
    version="1.0",
    deprecated_args="write_permission",
    custom_message="Fine-grained tokens added complexity to the permissions, making it irrelevant to check if a token has 'write' access.",
)
@_deprecate_positional_args(version="1.0")
def login(
    token: Optional[str] = None,
    *,
    add_to_git_credential: bool = False,
    new_session: bool = True,
    write_permission: bool = False,
) -> None:
    """Login the machine to access the Hub.

    The `token` is persisted in cache and set as a git credential. Once done, the machine
    is logged in and the access token will be available across all `huggingface_hub`
    components. If `token` is not provided, it will be prompted to the user either with
    a widget (in a notebook) or via the terminal.

    To log in from outside of a script, one can also use `huggingface-cli login` which is
    a cli command that wraps [`login`].

    <Tip>

    [`login`] is a drop-in replacement method for [`notebook_login`] as it wraps and
    extends its capabilities.

    </Tip>

    <Tip>

    When the token is not passed, [`login`] will automatically detect if the script runs
    in a notebook or not. However, this detection might not be accurate due to the
    variety of notebooks that exist nowadays. If that is the case, you can always force
    the UI by using [`notebook_login`] or [`interpreter_login`].

    </Tip>

    Args:
        token (`str`, *optional*):
            User access token to generate from https://huggingface.co/settings/token.
        add_to_git_credential (`bool`, defaults to `False`):
            If `True`, token will be set as git credential. If no git credential helper
            is configured, a warning will be displayed to the user. If `token` is `None`,
            the value of `add_to_git_credential` is ignored and will be prompted again
            to the end user.
        new_session (`bool`, defaults to `True`):
            If `True`, will request a token even if one is already saved on the machine.
        write_permission (`bool`):
            Ignored and deprecated argument.
    Raises:
        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
            If an organization token is passed. Only personal account tokens are valid
            to log in.
        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
            If token is invalid.
        [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
            If running in a notebook but `ipywidgets` is not installed.
    """
    if token is not None:
        if not add_to_git_credential:
            logger.info(
                "The token has not been saved to the git credentials helper. Pass "
                "`add_to_git_credential=True` in this function directly or "
                "`--add-to-git-credential` if using via `huggingface-cli` if "
                "you want to set the git credential as well."
            )
        _login(token, add_to_git_credential=add_to_git_credential)
    elif is_notebook():
        notebook_login(new_session=new_session)
    else:
        interpreter_login(new_session=new_session)


def logout(token_name: Optional[str] = None) -> None:
    """Logout the machine from the Hub.

    Token is deleted from the machine and removed from git credential.

    Args:
        token_name (`str`, *optional*):
            Name of the access token to logout from. If `None`, will logout from all saved access tokens.
    Raises:
        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError):
            If the access token name is not found.
    """
    if get_token() is None and not get_stored_tokens():  # No active token and no saved access tokens
        logger.warning("Not logged in!")
        return
    if not token_name:
        # Delete all saved access tokens and token
        for file_path in (constants.HF_TOKEN_PATH, constants.HF_STORED_TOKENS_PATH):
            try:
                Path(file_path).unlink()
            except FileNotFoundError:
                pass
        logger.info("Successfully logged out from all access tokens.")
    else:
        _logout_from_token(token_name)
        logger.info(f"Successfully logged out from access token: {token_name}.")

    unset_git_credential()

    # Check if still logged in
    if _get_token_from_google_colab() is not None:
        raise EnvironmentError(
            "You are automatically logged in using a Google Colab secret.\n"
            "To log out, you must unset the `HF_TOKEN` secret in your Colab settings."
        )
    if _get_token_from_environment() is not None:
        raise EnvironmentError(
            "Token has been deleted from your machine but you are still logged in.\n"
            "To log out, you must clear out both `HF_TOKEN` and `HUGGING_FACE_HUB_TOKEN` environment variables."
        )


def auth_switch(token_name: str, add_to_git_credential: bool = False) -> None:
    """Switch to a different access token.

    Args:
        token_name (`str`):
            Name of the access token to switch to.
        add_to_git_credential (`bool`, defaults to `False`):
            If `True`, token will be set as git credential. If no git credential helper
            is configured, a warning will be displayed to the user. If `token` is `None`,
            the value of `add_to_git_credential` is ignored and will be prompted again
            to the end user.

    Raises:
        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError):
            If the access token name is not found.
    """
    token = _get_token_by_name(token_name)
    if not token:
        raise ValueError(f"Access token {token_name} not found in {constants.HF_STORED_TOKENS_PATH}")
    # Write token to HF_TOKEN_PATH
    _set_active_token(token_name, add_to_git_credential)
    logger.info(f"The current active token is: {token_name}")
    token_from_environment = _get_token_from_environment()
    if token_from_environment is not None and token_from_environment != token:
        logger.warning(
            "The environment variable `HF_TOKEN` is set and will override the access token you've just switched to."
        )


def auth_list() -> None:
    """List all stored access tokens."""
    tokens = get_stored_tokens()

    if not tokens:
        logger.info("No access tokens found.")
        return
    # Find current token
    current_token = get_token()
    current_token_name = None
    for token_name in tokens:
        if tokens.get(token_name) == current_token:
            current_token_name = token_name
    # Print header
    max_offset = max(len("token"), max(len(token) for token in tokens)) + 2
    print(f" {{:<{max_offset}}}| {{:<15}}".format("name", "token"))
    print("-" * (max_offset + 2) + "|" + "-" * 15)

    # Print saved access tokens
    for token_name in tokens:
        token = tokens.get(token_name, "<not set>")
        masked_token = f"{token[:3]}****{token[-4:]}" if token != "<not set>" else token
        is_current = "*" if token == current_token else " "

        print(f"{is_current} {{:<{max_offset}}}| {{:<15}}".format(token_name, masked_token))

    if _get_token_from_environment():
        logger.warning(
            "\nNote: Environment variable `HF_TOKEN` is set and is the current active token independently from the stored tokens listed above."
        )
    elif current_token_name is None:
        logger.warning(
            "\nNote: No active token is set and no environment variable `HF_TOKEN` is found. Use `huggingface-cli login` to log in."
        )


###
# Interpreter-based login (text)
###


@_deprecate_arguments(
    version="1.0",
    deprecated_args="write_permission",
    custom_message="Fine-grained tokens added complexity to the permissions, making it irrelevant to check if a token has 'write' access.",
)
@_deprecate_positional_args(version="1.0")
def interpreter_login(*, new_session: bool = True, write_permission: bool = False) -> None:
    """
    Displays a prompt to log in to the HF website and store the token.

    This is equivalent to [`login`] without passing a token when not run in a notebook.
    [`interpreter_login`] is useful if you want to force the use of the terminal prompt
    instead of a notebook widget.

    For more details, see [`login`].

    Args:
        new_session (`bool`, defaults to `True`):
            If `True`, will request a token even if one is already saved on the machine.
        write_permission (`bool`):
            Ignored and deprecated argument.
    """
    if not new_session and get_token() is not None:
        logger.info("User is already logged in.")
        return

    from .commands.delete_cache import _ask_for_confirmation_no_tui

    print(_HF_LOGO_ASCII)
    if get_token() is not None:
        logger.info(
            "    A token is already saved on your machine. Run `huggingface-cli"
            " whoami` to get more information or `huggingface-cli logout` if you want"
            " to log out."
        )
        logger.info("    Setting a new token will erase the existing one.")

    logger.info(
        "    To log in, `huggingface_hub` requires a token generated from https://huggingface.co/settings/tokens ."
    )
    if os.name == "nt":
        logger.info("Token can be pasted using 'Right-Click'.")
    token = getpass("Enter your token (input will not be visible): ")
    add_to_git_credential = _ask_for_confirmation_no_tui("Add token as git credential?")

    _login(token=token, add_to_git_credential=add_to_git_credential)


###
# Notebook-based login (widget)
###

NOTEBOOK_LOGIN_PASSWORD_HTML = """<center> <img
src=https://huggingface.co/front/assets/huggingface_logo-noborder.svg
alt='Hugging Face'> <br> Immediately click login after typing your password or
it might be stored in plain text in this notebook file. </center>"""


NOTEBOOK_LOGIN_TOKEN_HTML_START = """<center> <img
src=https://huggingface.co/front/assets/huggingface_logo-noborder.svg
alt='Hugging Face'> <br> Copy a token from <a
href="https://huggingface.co/settings/tokens" target="_blank">your Hugging Face
tokens page</a> and paste it below. <br> Immediately click login after copying
your token or it might be stored in plain text in this notebook file. </center>"""


NOTEBOOK_LOGIN_TOKEN_HTML_END = """
<b>Pro Tip:</b> If you don't already have one, you can create a dedicated
'notebooks' token with 'write' access, that you can then easily reuse for all
notebooks. </center>"""


@_deprecate_arguments(
    version="1.0",
    deprecated_args="write_permission",
    custom_message="Fine-grained tokens added complexity to the permissions, making it irrelevant to check if a token has 'write' access.",
)
@_deprecate_positional_args(version="1.0")
def notebook_login(*, new_session: bool = True, write_permission: bool = False) -> None:
    """
    Displays a widget to log in to the HF website and store the token.

    This is equivalent to [`login`] without passing a token when run in a notebook.
    [`notebook_login`] is useful if you want to force the use of the notebook widget
    instead of a prompt in the terminal.

    For more details, see [`login`].

    Args:
        new_session (`bool`, defaults to `True`):
            If `True`, will request a token even if one is already saved on the machine.
        write_permission (`bool`):
            Ignored and deprecated argument.
    """
    try:
        import ipywidgets.widgets as widgets  # type: ignore
        from IPython.display import display  # type: ignore
    except ImportError:
        raise ImportError(
            "The `notebook_login` function can only be used in a notebook (Jupyter or"
            " Colab) and you need the `ipywidgets` module: `pip install ipywidgets`."
        )
    if not new_session and get_token() is not None:
        logger.info("User is already logged in.")
        return

    box_layout = widgets.Layout(display="flex", flex_flow="column", align_items="center", width="50%")

    token_widget = widgets.Password(description="Token:")
    git_checkbox_widget = widgets.Checkbox(value=True, description="Add token as git credential?")
    token_finish_button = widgets.Button(description="Login")

    login_token_widget = widgets.VBox(
        [
            widgets.HTML(NOTEBOOK_LOGIN_TOKEN_HTML_START),
            token_widget,
            git_checkbox_widget,
            token_finish_button,
            widgets.HTML(NOTEBOOK_LOGIN_TOKEN_HTML_END),
        ],
        layout=box_layout,
    )
    display(login_token_widget)

    # On click events
    def login_token_event(t):
        """Event handler for the login button."""
        token = token_widget.value
        add_to_git_credential = git_checkbox_widget.value
        # Erase token and clear value to make sure it's not saved in the notebook.
        token_widget.value = ""
        # Hide inputs
        login_token_widget.children = [widgets.Label("Connecting...")]
        try:
            with capture_output() as captured:
                _login(token, add_to_git_credential=add_to_git_credential)
            message = captured.getvalue()
        except Exception as error:
            message = str(error)
        # Print result (success message or error)
        login_token_widget.children = [widgets.Label(line) for line in message.split("\n") if line.strip()]

    token_finish_button.on_click(login_token_event)


###
# Login private helpers
###


def _login(
    token: str,
    add_to_git_credential: bool,
) -> None:
    from .hf_api import whoami  # avoid circular import

    if token.startswith("api_org"):
        raise ValueError("You must use your personal account token, not an organization token.")

    token_info = whoami(token)
    permission = token_info["auth"]["accessToken"]["role"]
    logger.info(f"Token is valid (permission: {permission}).")

    token_name = token_info["auth"]["accessToken"]["displayName"]
    # Store token locally
    _save_token(token=token, token_name=token_name)
    # Set active token
    _set_active_token(token_name=token_name, add_to_git_credential=add_to_git_credential)
    logger.info("Login successful.")
    if _get_token_from_environment():
        logger.warning(
            "Note: Environment variable `HF_TOKEN` is set and is the current active token independently from the token you've just configured."
        )
    else:
        logger.info(f"The current active token is: `{token_name}`")


def _logout_from_token(token_name: str) -> None:
    """Logout from a specific access token.

    Args:
        token_name (`str`):
            The name of the access token to logout from.
    Raises:
        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError):
            If the access token name is not found.
    """
    stored_tokens = get_stored_tokens()
    # If there are no access tokens saved or the access token name is not found, do nothing
    if not stored_tokens or token_name not in stored_tokens:
        return

    token = stored_tokens.pop(token_name)
    _save_stored_tokens(stored_tokens)

    if token == _get_token_from_file():
        logger.warning(f"Active token '{token_name}' has been deleted.")
        Path(constants.HF_TOKEN_PATH).unlink(missing_ok=True)


def _set_active_token(
    token_name: str,
    add_to_git_credential: bool,
) -> None:
    """Set the active access token.

    Args:
        token_name (`str`):
            The name of the token to set as active.
    """
    token = _get_token_by_name(token_name)
    if not token:
        raise ValueError(f"Token {token_name} not found in {constants.HF_STORED_TOKENS_PATH}")
    if add_to_git_credential:
        if _is_git_credential_helper_configured():
            set_git_credential(token)
            logger.info(
                "Your token has been saved in your configured git credential helpers"
                + f" ({','.join(list_credential_helpers())})."
            )
        else:
            logger.warning("Token has not been saved to git credential helper.")
    # Write token to HF_TOKEN_PATH
    path = Path(constants.HF_TOKEN_PATH)
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(token)
    logger.info(f"Your token has been saved to {constants.HF_TOKEN_PATH}")


def _is_git_credential_helper_configured() -> bool:
    """Check if a git credential helper is configured.

    Warns user if not the case (except for Google Colab where "store" is set by default
    by `huggingface_hub`).
    """
    helpers = list_credential_helpers()
    if len(helpers) > 0:
        return True  # Do not warn: at least 1 helper is set

    # Only in Google Colab to avoid the warning message
    # See https://github.com/huggingface/huggingface_hub/issues/1043#issuecomment-1247010710
    if is_google_colab():
        _set_store_as_git_credential_helper_globally()
        return True  # Do not warn: "store" is used by default in Google Colab

    # Otherwise, warn user
    print(
        ANSI.red(
            "Cannot authenticate through git-credential as no helper is defined on your"
            " machine.\nYou might have to re-authenticate when pushing to the Hugging"
            " Face Hub.\nRun the following command in your terminal in case you want to"
            " set the 'store' credential helper as default.\n\ngit config --global"
            " credential.helper store\n\nRead"
            " https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage for more"
            " details."
        )
    )
    return False


def _set_store_as_git_credential_helper_globally() -> None:
    """Set globally the credential.helper to `store`.

    To be used only in Google Colab as we assume the user doesn't care about the git
    credential config. It is the only particular case where we don't want to display the
    warning message in [`notebook_login()`].

    Related:
    - https://github.com/huggingface/huggingface_hub/issues/1043
    - https://github.com/huggingface/huggingface_hub/issues/1051
    - https://git-scm.com/docs/git-credential-store
    """
    try:
        run_subprocess("git config --global credential.helper store")
    except subprocess.CalledProcessError as exc:
        raise EnvironmentError(exc.stderr)
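For context, a minimal sketch (not part of the diff) of how this module is typically driven through the `huggingface_hub` public API; the token value below is a placeholder, and nothing here goes beyond what the docstrings above state.

```python
# Minimal sketch of the public login flow backed by this module.
# "hf_xxx_placeholder" is a dummy value, not a real credential.
from huggingface_hub import login, logout

# Validates the token (via `whoami`), stores it, and makes it the active one.
login(token="hf_xxx_placeholder", add_to_git_credential=False)

# Deletes all stored tokens; pass `token_name=...` to remove a single one.
logout()
```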
parrot/lib/python3.10/site-packages/huggingface_hub/_snapshot_download.py
ADDED
@@ -0,0 +1,307 @@
| 1 |
+
import os
|
| 2 |
+
from pathlib import Path
|
| 3 |
+
from typing import Dict, List, Literal, Optional, Union
|
| 4 |
+
|
| 5 |
+
import requests
|
| 6 |
+
from tqdm.auto import tqdm as base_tqdm
|
| 7 |
+
from tqdm.contrib.concurrent import thread_map
|
| 8 |
+
|
| 9 |
+
from . import constants
|
| 10 |
+
from .errors import GatedRepoError, LocalEntryNotFoundError, RepositoryNotFoundError, RevisionNotFoundError
|
| 11 |
+
from .file_download import REGEX_COMMIT_HASH, hf_hub_download, repo_folder_name
|
| 12 |
+
from .hf_api import DatasetInfo, HfApi, ModelInfo, SpaceInfo
|
| 13 |
+
from .utils import OfflineModeIsEnabled, filter_repo_objects, logging, validate_hf_hub_args
|
| 14 |
+
from .utils import tqdm as hf_tqdm
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
logger = logging.get_logger(__name__)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@validate_hf_hub_args
|
| 21 |
+
def snapshot_download(
|
| 22 |
+
repo_id: str,
|
| 23 |
+
*,
|
| 24 |
+
repo_type: Optional[str] = None,
|
| 25 |
+
revision: Optional[str] = None,
|
| 26 |
+
cache_dir: Union[str, Path, None] = None,
|
| 27 |
+
local_dir: Union[str, Path, None] = None,
|
| 28 |
+
library_name: Optional[str] = None,
|
| 29 |
+
library_version: Optional[str] = None,
|
| 30 |
+
user_agent: Optional[Union[Dict, str]] = None,
|
| 31 |
+
proxies: Optional[Dict] = None,
|
| 32 |
+
etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT,
|
| 33 |
+
force_download: bool = False,
|
| 34 |
+
token: Optional[Union[bool, str]] = None,
|
| 35 |
+
local_files_only: bool = False,
|
| 36 |
+
allow_patterns: Optional[Union[List[str], str]] = None,
|
| 37 |
+
ignore_patterns: Optional[Union[List[str], str]] = None,
|
| 38 |
+
max_workers: int = 8,
|
| 39 |
+
tqdm_class: Optional[base_tqdm] = None,
|
| 40 |
+
headers: Optional[Dict[str, str]] = None,
|
| 41 |
+
endpoint: Optional[str] = None,
|
| 42 |
+
# Deprecated args
|
| 43 |
+
local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto",
|
| 44 |
+
resume_download: Optional[bool] = None,
|
| 45 |
+
) -> str:
|
| 46 |
+
"""Download repo files.
|
| 47 |
+
|
| 48 |
+
Download a whole snapshot of a repo's files at the specified revision. This is useful when you want all files from
|
| 49 |
+
a repo, because you don't know which ones you will need a priori. All files are nested inside a folder in order
|
| 50 |
+
to keep their actual filename relative to that folder. You can also filter which files to download using
|
| 51 |
+
`allow_patterns` and `ignore_patterns`.
|
| 52 |
+
|
| 53 |
+
If `local_dir` is provided, the file structure from the repo will be replicated in this location. When using this
|
| 54 |
+
option, the `cache_dir` will not be used and a `.cache/huggingface/` folder will be created at the root of `local_dir`
|
| 55 |
+
to store some metadata related to the downloaded files. While this mechanism is not as robust as the main
|
| 56 |
+
cache-system, it's optimized for regularly pulling the latest version of a repository.
|
| 57 |
+
|
| 58 |
+
An alternative would be to clone the repo but this requires git and git-lfs to be installed and properly
|
| 59 |
+
configured. It is also not possible to filter which files to download when cloning a repository using git.
|
| 60 |
+
|
| 61 |
+
Args:
|
| 62 |
+
repo_id (`str`):
|
| 63 |
+
A user or an organization name and a repo name separated by a `/`.
|
| 64 |
+
repo_type (`str`, *optional*):
|
| 65 |
+
Set to `"dataset"` or `"space"` if downloading from a dataset or space,
|
| 66 |
+
`None` or `"model"` if downloading from a model. Default is `None`.
|
| 67 |
+
revision (`str`, *optional*):
|
| 68 |
+
An optional Git revision id which can be a branch name, a tag, or a
|
| 69 |
+
commit hash.
|
| 70 |
+
cache_dir (`str`, `Path`, *optional*):
|
| 71 |
+
Path to the folder where cached files are stored.
|
| 72 |
+
local_dir (`str` or `Path`, *optional*):
|
| 73 |
+
If provided, the downloaded files will be placed under this directory.
|
| 74 |
+
library_name (`str`, *optional*):
|
| 75 |
+
The name of the library to which the object corresponds.
|
| 76 |
+
library_version (`str`, *optional*):
|
| 77 |
+
The version of the library.
|
| 78 |
+
user_agent (`str`, `dict`, *optional*):
|
| 79 |
+
The user-agent info in the form of a dictionary or a string.
|
| 80 |
+
proxies (`dict`, *optional*):
|
| 81 |
+
Dictionary mapping protocol to the URL of the proxy passed to
|
| 82 |
+
`requests.request`.
|
| 83 |
+
etag_timeout (`float`, *optional*, defaults to `10`):
|
| 84 |
+
When fetching ETag, how many seconds to wait for the server to send
|
| 85 |
+
data before giving up which is passed to `requests.request`.
|
| 86 |
+
force_download (`bool`, *optional*, defaults to `False`):
|
| 87 |
+
Whether the file should be downloaded even if it already exists in the local cache.
|
| 88 |
+
token (`str`, `bool`, *optional*):
|
| 89 |
+
A token to be used for the download.
|
| 90 |
+
- If `True`, the token is read from the HuggingFace config
|
| 91 |
+
folder.
|
| 92 |
+
- If a string, it's used as the authentication token.
|
| 93 |
+
headers (`dict`, *optional*):
|
| 94 |
+
Additional headers to include in the request. Those headers take precedence over the others.
|
| 95 |
+
local_files_only (`bool`, *optional*, defaults to `False`):
|
| 96 |
+
If `True`, avoid downloading the file and return the path to the
|
| 97 |
+
local cached file if it exists.
|
| 98 |
+
allow_patterns (`List[str]` or `str`, *optional*):
|
| 99 |
+
If provided, only files matching at least one pattern are downloaded.
|
| 100 |
+
ignore_patterns (`List[str]` or `str`, *optional*):
|
| 101 |
+
If provided, files matching any of the patterns are not downloaded.
|
| 102 |
+
max_workers (`int`, *optional*):
|
| 103 |
+
Number of concurrent threads to download files (1 thread = 1 file download).
|
| 104 |
+
Defaults to 8.
|
| 105 |
+
tqdm_class (`tqdm`, *optional*):
|
| 106 |
+
If provided, overwrites the default behavior for the progress bar. Passed
|
| 107 |
+
argument must inherit from `tqdm.auto.tqdm` or at least mimic its behavior.
|
| 108 |
+
Note that the `tqdm_class` is not passed to each individual download.
|
| 109 |
+
Defaults to the custom HF progress bar that can be disabled by setting
|
| 110 |
+
`HF_HUB_DISABLE_PROGRESS_BARS` environment variable.
|
| 111 |
+
|
| 112 |
+
Returns:
|
| 113 |
+
`str`: folder path of the repo snapshot.
|
| 114 |
+
|
| 115 |
+
Raises:
|
| 116 |
+
[`~utils.RepositoryNotFoundError`]
|
| 117 |
+
If the repository to download from cannot be found. This may be because it doesn't exist,
|
| 118 |
+
or because it is set to `private` and you do not have access.
|
| 119 |
+
[`~utils.RevisionNotFoundError`]
|
| 120 |
+
If the revision to download from cannot be found.
|
| 121 |
+
[`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
|
| 122 |
+
If `token=True` and the token cannot be found.
|
| 123 |
+
[`OSError`](https://docs.python.org/3/library/exceptions.html#OSError) if
|
| 124 |
+
ETag cannot be determined.
|
| 125 |
+
[`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
|
| 126 |
+
if some parameter value is invalid.
|
| 127 |
+
"""
|
| 128 |
+
    if cache_dir is None:
        cache_dir = constants.HF_HUB_CACHE
    if revision is None:
        revision = constants.DEFAULT_REVISION
    if isinstance(cache_dir, Path):
        cache_dir = str(cache_dir)

    if repo_type is None:
        repo_type = "model"
    if repo_type not in constants.REPO_TYPES:
        raise ValueError(f"Invalid repo type: {repo_type}. Accepted repo types are: {str(constants.REPO_TYPES)}")

    storage_folder = os.path.join(cache_dir, repo_folder_name(repo_id=repo_id, repo_type=repo_type))

    repo_info: Union[ModelInfo, DatasetInfo, SpaceInfo, None] = None
    api_call_error: Optional[Exception] = None
    if not local_files_only:
        # try/except logic to handle different errors => taken from `hf_hub_download`
        try:
            # if we have internet connection we want to list files to download
            api = HfApi(
                library_name=library_name,
                library_version=library_version,
                user_agent=user_agent,
                endpoint=endpoint,
                headers=headers,
            )
            repo_info = api.repo_info(repo_id=repo_id, repo_type=repo_type, revision=revision, token=token)
        except (requests.exceptions.SSLError, requests.exceptions.ProxyError):
            # Actually raise for those subclasses of ConnectionError
            raise
        except (
            requests.exceptions.ConnectionError,
            requests.exceptions.Timeout,
            OfflineModeIsEnabled,
        ) as error:
            # Internet connection is down
            # => will try to use local files only
            api_call_error = error
        except RevisionNotFoundError:
            # The repo was found but the revision doesn't exist on the Hub (never existed or got deleted)
            raise
        except requests.HTTPError as error:
            # Multiple reasons for an http error:
            # - Repository is private and invalid/missing token sent
            # - Repository is gated and invalid/missing token sent
            # - Hub is down (error 500 or 504)
            # => let's switch to 'local_files_only=True' to check if the files are already cached.
            #    (if it's not the case, the error will be re-raised)
            api_call_error = error

    # At this stage, if `repo_info` is None it means either:
    # - internet connection is down
    # - internet connection is deactivated (local_files_only=True or HF_HUB_OFFLINE=True)
    # - repo is private/gated and invalid/missing token sent
    # - Hub is down
    # => let's look if we can find the appropriate folder in the cache:
    #    - if the specified revision is a commit hash, look inside "snapshots".
    #    - if the specified revision is a branch or tag, look inside "refs".
    # => if local_dir is not None, we will return the path to the local folder if it exists.
    if repo_info is None:
        # Try to get which commit hash corresponds to the specified revision
        commit_hash = None
        if REGEX_COMMIT_HASH.match(revision):
            commit_hash = revision
        else:
            ref_path = os.path.join(storage_folder, "refs", revision)
            if os.path.exists(ref_path):
                # retrieve commit_hash from refs file
                with open(ref_path) as f:
                    commit_hash = f.read()

        # Try to locate snapshot folder for this commit hash
        if commit_hash is not None:
            snapshot_folder = os.path.join(storage_folder, "snapshots", commit_hash)
            if os.path.exists(snapshot_folder):
                # Snapshot folder exists => let's return it
                # (but we can't check if all the files are actually there)
                return snapshot_folder
        # If local_dir is not None, return it if it exists and is not empty
        if local_dir is not None:
            local_dir = Path(local_dir)
            if local_dir.is_dir() and any(local_dir.iterdir()):
                logger.warning(
                    f"Returning existing local_dir `{local_dir}` as remote repo cannot be accessed in `snapshot_download` ({api_call_error})."
                )
                return str(local_dir.resolve())
        # If we couldn't find the appropriate folder on disk, raise an error.
        if local_files_only:
            raise LocalEntryNotFoundError(
                "Cannot find an appropriate cached snapshot folder for the specified revision on the local disk and "
                "outgoing traffic has been disabled. To enable repo look-ups and downloads online, pass "
                "'local_files_only=False' as input."
            )
        elif isinstance(api_call_error, OfflineModeIsEnabled):
            raise LocalEntryNotFoundError(
                "Cannot find an appropriate cached snapshot folder for the specified revision on the local disk and "
                "outgoing traffic has been disabled. To enable repo look-ups and downloads online, set "
                "'HF_HUB_OFFLINE=0' as environment variable."
            ) from api_call_error
        elif isinstance(api_call_error, (RepositoryNotFoundError, GatedRepoError)):
            # Repo not found or gated => let's raise the actual error
            raise api_call_error
        else:
            # Otherwise: most likely a connection issue or Hub downtime => let's warn the user
            raise LocalEntryNotFoundError(
                "An error happened while trying to locate the files on the Hub and we cannot find the appropriate"
                " snapshot folder for the specified revision on the local disk. Please check your internet connection"
                " and try again."
            ) from api_call_error
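The offline branch above walks a `refs`/`snapshots` layout on disk. A self-contained sketch of that layout, with a made-up commit hash, for illustration only:

```py
import os
import tempfile

# Illustrative cache layout: <storage_folder>/refs/<revision> stores a commit
# hash, and <storage_folder>/snapshots/<commit_hash>/ holds the cached files.
storage_folder = tempfile.mkdtemp()
os.makedirs(os.path.join(storage_folder, "refs"))
with open(os.path.join(storage_folder, "refs", "main"), "w") as f:
    f.write("0123456789abcdef")  # fake commit hash, for illustration only

with open(os.path.join(storage_folder, "refs", "main")) as f:
    commit_hash = f.read()
snapshot_folder = os.path.join(storage_folder, "snapshots", commit_hash)
print(os.path.exists(snapshot_folder))  # False: nothing downloaded yet
```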
    # At this stage, internet connection is up and running
    # => let's download the files!
    assert repo_info.sha is not None, "Repo info returned from server must have a revision sha."
    assert repo_info.siblings is not None, "Repo info returned from server must have a siblings list."
    filtered_repo_files = list(
        filter_repo_objects(
            items=[f.rfilename for f in repo_info.siblings],
            allow_patterns=allow_patterns,
            ignore_patterns=ignore_patterns,
        )
    )
    commit_hash = repo_info.sha
    snapshot_folder = os.path.join(storage_folder, "snapshots", commit_hash)
    # if passed revision is not identical to commit_hash
    # then revision has to be a branch name or tag name.
    # In that case store a ref.
    if revision != commit_hash:
        ref_path = os.path.join(storage_folder, "refs", revision)
        try:
            os.makedirs(os.path.dirname(ref_path), exist_ok=True)
            with open(ref_path, "w") as f:
                f.write(commit_hash)
        except OSError as e:
            logger.warning(f"Ignored error while writing commit hash to {ref_path}: {e}.")

    # we pass the commit_hash to hf_hub_download
    # so no network call happens if we already
    # have the file locally.
    def _inner_hf_hub_download(repo_file: str):
        return hf_hub_download(
            repo_id,
            filename=repo_file,
            repo_type=repo_type,
            revision=commit_hash,
            endpoint=endpoint,
            cache_dir=cache_dir,
            local_dir=local_dir,
            local_dir_use_symlinks=local_dir_use_symlinks,
            library_name=library_name,
            library_version=library_version,
            user_agent=user_agent,
            proxies=proxies,
            etag_timeout=etag_timeout,
            resume_download=resume_download,
            force_download=force_download,
            token=token,
            headers=headers,
        )

    if constants.HF_HUB_ENABLE_HF_TRANSFER:
        # when using hf_transfer we don't want extra parallelism
        # on top of the one hf_transfer provides
        for file in filtered_repo_files:
            _inner_hf_hub_download(file)
    else:
        thread_map(
            _inner_hf_hub_download,
            filtered_repo_files,
            desc=f"Fetching {len(filtered_repo_files)} files",
            max_workers=max_workers,
            # User can use their own tqdm class or the default one from `huggingface_hub.utils`
            tqdm_class=tqdm_class or hf_tqdm,
        )

    if local_dir is not None:
        return str(os.path.realpath(local_dir))
    return snapshot_folder
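A hedged usage sketch of `snapshot_download`'s offline path (repo id illustrative; assumes the snapshot was cached by an earlier online call; older versions re-export the error from `huggingface_hub.utils` instead of `huggingface_hub.errors`):

```py
from huggingface_hub import snapshot_download
from huggingface_hub.errors import LocalEntryNotFoundError

try:
    path = snapshot_download(repo_id="gpt2", local_files_only=True)
    print(path)  # .../snapshots/<commit_hash>
except LocalEntryNotFoundError:
    print("Not cached yet: re-run with local_files_only=False to download.")
```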
parrot/lib/python3.10/site-packages/huggingface_hub/_space_api.py
ADDED
@@ -0,0 +1,160 @@
# coding=utf-8
# Copyright 2019-present, the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import Dict, Optional

from huggingface_hub.utils import parse_datetime


class SpaceStage(str, Enum):
    """
    Enumeration of the possible stages of a Space on the Hub.

    Value can be compared to a string:
    ```py
    assert SpaceStage.BUILDING == "BUILDING"
    ```

    Taken from https://github.com/huggingface/moon-landing/blob/main/server/repo_types/SpaceInfo.ts#L61 (private url).
    """

    # Copied from moon-landing > server > repo_types > SpaceInfo.ts (private repo)
    NO_APP_FILE = "NO_APP_FILE"
    CONFIG_ERROR = "CONFIG_ERROR"
    BUILDING = "BUILDING"
    BUILD_ERROR = "BUILD_ERROR"
    RUNNING = "RUNNING"
    RUNNING_BUILDING = "RUNNING_BUILDING"
    RUNTIME_ERROR = "RUNTIME_ERROR"
    DELETING = "DELETING"
    STOPPED = "STOPPED"
    PAUSED = "PAUSED"


class SpaceHardware(str, Enum):
    """
    Enumeration of the hardware options available to run your Space on the Hub.

    Value can be compared to a string:
    ```py
    assert SpaceHardware.CPU_BASIC == "cpu-basic"
    ```

    Taken from https://github.com/huggingface/moon-landing/blob/main/server/repo_types/SpaceInfo.ts#L73 (private url).
    """

    CPU_BASIC = "cpu-basic"
    CPU_UPGRADE = "cpu-upgrade"
    T4_SMALL = "t4-small"
    T4_MEDIUM = "t4-medium"
    L4X1 = "l4x1"
    L4X4 = "l4x4"
    ZERO_A10G = "zero-a10g"
    A10G_SMALL = "a10g-small"
    A10G_LARGE = "a10g-large"
    A10G_LARGEX2 = "a10g-largex2"
    A10G_LARGEX4 = "a10g-largex4"
    A100_LARGE = "a100-large"
    V5E_1X1 = "v5e-1x1"
    V5E_2X2 = "v5e-2x2"
    V5E_2X4 = "v5e-2x4"


class SpaceStorage(str, Enum):
    """
    Enumeration of the persistent storage tiers available for your Space on the Hub.

    Value can be compared to a string:
    ```py
    assert SpaceStorage.SMALL == "small"
    ```

    Taken from https://github.com/huggingface/moon-landing/blob/main/server/repo_types/SpaceHardwareFlavor.ts#L24 (private url).
    """

    SMALL = "small"
    MEDIUM = "medium"
    LARGE = "large"


@dataclass
class SpaceRuntime:
    """
    Contains information about the current runtime of a Space.

    Args:
        stage (`str`):
            Current stage of the Space. Example: RUNNING.
        hardware (`str` or `None`):
            Current hardware of the Space. Example: "cpu-basic". Can be `None` if the Space
            is `BUILDING` for the first time.
        requested_hardware (`str` or `None`):
            Requested hardware. Can be different from `hardware`, especially if the request
            has just been made. Example: "t4-medium". Can be `None` if no hardware has
            been requested yet.
        sleep_time (`int` or `None`):
            Number of seconds the Space will be kept alive after the last request. By default (if value is `None`), the
            Space will never go to sleep if it's running on upgraded hardware, while it will go to sleep after 48
            hours on free 'cpu-basic' hardware. For more details, see https://huggingface.co/docs/hub/spaces-gpus#sleep-time.
        storage (`str` or `None`):
            Current persistent storage tier of the Space, if any.
        raw (`dict`):
            Raw response from the server. Contains more information about the Space
            runtime such as the number of replicas, number of CPUs, memory size, etc.
    """

    stage: SpaceStage
    hardware: Optional[SpaceHardware]
    requested_hardware: Optional[SpaceHardware]
    sleep_time: Optional[int]
    storage: Optional[SpaceStorage]
    raw: Dict

    def __init__(self, data: Dict) -> None:
        self.stage = data["stage"]
        self.hardware = data.get("hardware", {}).get("current")
        self.requested_hardware = data.get("hardware", {}).get("requested")
        self.sleep_time = data.get("gcTimeout")
        self.storage = data.get("storage")
        self.raw = data


@dataclass
class SpaceVariable:
    """
    Contains information about the current variables of a Space.

    Args:
        key (`str`):
            Variable key. Example: `"MODEL_REPO_ID"`
        value (`str`):
            Variable value. Example: `"the_model_repo_id"`.
        description (`str` or `None`):
            Description of the variable. Example: `"Model Repo ID of the implemented model"`.
        updated_at (`datetime` or `None`):
            Datetime of the last update of the variable (if the variable has been updated at least once).
    """

    key: str
    value: str
    description: Optional[str]
    updated_at: Optional[datetime]

    def __init__(self, key: str, values: Dict) -> None:
        self.key = key
        self.value = values["value"]
        self.description = values.get("description")
        updated_at = values.get("updatedAt")
        self.updated_at = parse_datetime(updated_at) if updated_at is not None else None
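Given the classes above, a `SpaceRuntime` can be built from a raw payload dict; the payload below is a plausible minimal example, not an exhaustive server response:

```py
# A plausible minimal payload; a real server response has more fields
# (all of which are kept in `.raw`):
runtime = SpaceRuntime(
    {
        "stage": "RUNNING",
        "hardware": {"current": "cpu-basic", "requested": "t4-small"},
        "gcTimeout": 3600,
    }
)
assert runtime.stage == SpaceStage.RUNNING  # str-enum compares equal to "RUNNING"
print(runtime.hardware, runtime.requested_hardware, runtime.sleep_time)
# cpu-basic t4-small 3600
```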
parrot/lib/python3.10/site-packages/huggingface_hub/_tensorboard_logger.py
ADDED
@@ -0,0 +1,194 @@
# Copyright 2023 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains a logger to push training logs to the Hub, using Tensorboard."""

from pathlib import Path
from typing import TYPE_CHECKING, List, Optional, Union

from ._commit_scheduler import CommitScheduler
from .errors import EntryNotFoundError
from .repocard import ModelCard
from .utils import experimental


# Depending on user's setup, SummaryWriter can come either from 'tensorboardX'
# or from 'torch.utils.tensorboard'. Both are compatible so let's try to load
# from either of them.
try:
    from tensorboardX import SummaryWriter

    is_summary_writer_available = True

except ImportError:
    try:
        from torch.utils.tensorboard import SummaryWriter

        is_summary_writer_available = True
    except ImportError:
        # Dummy class to avoid failing at import. Will raise on instance creation.
        SummaryWriter = object
        is_summary_writer_available = False

if TYPE_CHECKING:
    from tensorboardX import SummaryWriter


class HFSummaryWriter(SummaryWriter):
    """
    Wrapper around tensorboard's `SummaryWriter` to push training logs to the Hub.

    Data is logged locally and then pushed to the Hub asynchronously. Pushing data to the Hub is done in a separate
    thread to avoid blocking the training script. In particular, if the upload fails for any reason (e.g. a connection
    issue), the main script will not be interrupted. Data is automatically pushed to the Hub every `commit_every`
    minutes (defaults to every 5 minutes).

    <Tip warning={true}>

    `HFSummaryWriter` is experimental. Its API is subject to change in the future without prior notice.

    </Tip>

    Args:
        repo_id (`str`):
            The id of the repo to which the logs will be pushed.
        logdir (`str`, *optional*):
            The directory where the logs will be written. If not specified, a local directory will be created by the
            underlying `SummaryWriter` object.
        commit_every (`int` or `float`, *optional*):
            The frequency (in minutes) at which the logs will be pushed to the Hub. Defaults to 5 minutes.
        squash_history (`bool`, *optional*):
            Whether to squash the history of the repo after each commit. Defaults to `False`. Squashing commits is
            useful to avoid degraded performance on the repo when it grows too large.
        repo_type (`str`, *optional*):
            The type of the repo to which the logs will be pushed. Defaults to "model".
        repo_revision (`str`, *optional*):
            The revision of the repo to which the logs will be pushed. Defaults to "main".
        repo_private (`bool`, *optional*):
            Whether to make the repo private. If `None` (default), the repo will be public unless the organization's
            default is private. This value is ignored if the repo already exists.
        path_in_repo (`str`, *optional*):
            The path to the folder in the repo where the logs will be pushed. Defaults to "tensorboard/".
        repo_allow_patterns (`List[str]` or `str`, *optional*):
            A list of patterns to include in the upload. Defaults to `"*.tfevents.*"`. Check out the
            [upload guide](https://huggingface.co/docs/huggingface_hub/guides/upload#upload-a-folder) for more details.
        repo_ignore_patterns (`List[str]` or `str`, *optional*):
            A list of patterns to exclude from the upload. Check out the
            [upload guide](https://huggingface.co/docs/huggingface_hub/guides/upload#upload-a-folder) for more details.
        token (`str`, *optional*):
            Authentication token. Will default to the stored token. See https://huggingface.co/settings/token for more
            details.
        kwargs:
            Additional keyword arguments passed to `SummaryWriter`.

    Examples:
    ```diff
    # Taken from https://pytorch.org/docs/stable/tensorboard.html
    - from torch.utils.tensorboard import SummaryWriter
    + from huggingface_hub import HFSummaryWriter

    import numpy as np

    - writer = SummaryWriter()
    + writer = HFSummaryWriter(repo_id="username/my-trained-model")

    for n_iter in range(100):
        writer.add_scalar('Loss/train', np.random.random(), n_iter)
        writer.add_scalar('Loss/test', np.random.random(), n_iter)
        writer.add_scalar('Accuracy/train', np.random.random(), n_iter)
        writer.add_scalar('Accuracy/test', np.random.random(), n_iter)
    ```

    ```py
    >>> from huggingface_hub import HFSummaryWriter

    # Logs are automatically pushed every 15 minutes (5 by default) + when exiting the context manager
    >>> with HFSummaryWriter(repo_id="test_hf_logger", commit_every=15) as logger:
    ...     logger.add_scalar("a", 1)
    ...     logger.add_scalar("b", 2)
    ```
    """

    @experimental
    def __new__(cls, *args, **kwargs) -> "HFSummaryWriter":
        if not is_summary_writer_available:
            raise ImportError(
                "You must have `tensorboard` installed to use `HFSummaryWriter`. Please run `pip install --upgrade"
                " tensorboardX` first."
            )
        return super().__new__(cls)

    def __init__(
        self,
        repo_id: str,
        *,
        logdir: Optional[str] = None,
        commit_every: Union[int, float] = 5,
        squash_history: bool = False,
        repo_type: Optional[str] = None,
        repo_revision: Optional[str] = None,
        repo_private: Optional[bool] = None,
        path_in_repo: Optional[str] = "tensorboard",
        repo_allow_patterns: Optional[Union[List[str], str]] = "*.tfevents.*",
        repo_ignore_patterns: Optional[Union[List[str], str]] = None,
        token: Optional[str] = None,
        **kwargs,
    ):
        # Initialize SummaryWriter
        super().__init__(logdir=logdir, **kwargs)

        # Check logdir has been correctly initialized and fail early otherwise. In practice, SummaryWriter takes care of it.
        if not isinstance(self.logdir, str):
            raise ValueError(f"`self.logdir` must be a string. Got '{self.logdir}' of type {type(self.logdir)}.")

        # Append logdir name to `path_in_repo`
        if path_in_repo is None or path_in_repo == "":
            path_in_repo = Path(self.logdir).name
        else:
            path_in_repo = path_in_repo.strip("/") + "/" + Path(self.logdir).name

        # Initialize scheduler
        self.scheduler = CommitScheduler(
            folder_path=self.logdir,
            path_in_repo=path_in_repo,
            repo_id=repo_id,
            repo_type=repo_type,
            revision=repo_revision,
            private=repo_private,
            token=token,
            allow_patterns=repo_allow_patterns,
            ignore_patterns=repo_ignore_patterns,
            every=commit_every,
            squash_history=squash_history,
        )

        # Exposing some high-level info at root level
        self.repo_id = self.scheduler.repo_id
        self.repo_type = self.scheduler.repo_type
        self.repo_revision = self.scheduler.revision

        # Add `hf-summary-writer` tag to the model card metadata
        try:
            card = ModelCard.load(repo_id_or_path=self.repo_id, repo_type=self.repo_type)
        except EntryNotFoundError:
            card = ModelCard("")
        tags = card.data.get("tags", [])
        if "hf-summary-writer" not in tags:
            tags.append("hf-summary-writer")
            card.data["tags"] = tags
            card.push_to_hub(repo_id=self.repo_id, repo_type=self.repo_type)

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Push to hub in a non-blocking way when exiting the logger's context manager."""
        super().__exit__(exc_type, exc_val, exc_tb)
        future = self.scheduler.trigger()
        future.result()
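The `path_in_repo` handling in `__init__` can be checked in isolation. A self-contained restatement of that logic (the helper name is hypothetical):

```py
from pathlib import Path
from typing import Optional

def resolved_path_in_repo(path_in_repo: Optional[str], logdir: str) -> str:
    # Mirrors the constructor logic: the logdir's last component is always
    # appended so that several runs can share one repo.
    if path_in_repo is None or path_in_repo == "":
        return Path(logdir).name
    return path_in_repo.strip("/") + "/" + Path(logdir).name

assert resolved_path_in_repo("tensorboard", "runs/exp1") == "tensorboard/exp1"
assert resolved_path_in_repo(None, "runs/exp1") == "exp1"
```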
parrot/lib/python3.10/site-packages/huggingface_hub/_upload_large_folder.py
ADDED
@@ -0,0 +1,622 @@
# coding=utf-8
# Copyright 2024-present, the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
import logging
import os
import queue
import shutil
import sys
import threading
import time
import traceback
from datetime import datetime
from pathlib import Path
from threading import Lock
from typing import TYPE_CHECKING, List, Optional, Tuple, Union
from urllib.parse import quote

from . import constants
from ._commit_api import CommitOperationAdd, UploadInfo, _fetch_upload_modes
from ._local_folder import LocalUploadFileMetadata, LocalUploadFilePaths, get_local_upload_paths, read_upload_metadata
from .constants import DEFAULT_REVISION, REPO_TYPES
from .utils import DEFAULT_IGNORE_PATTERNS, filter_repo_objects, tqdm
from .utils._cache_manager import _format_size
from .utils.sha import sha_fileobj


if TYPE_CHECKING:
    from .hf_api import HfApi

logger = logging.getLogger(__name__)

WAITING_TIME_IF_NO_TASKS = 10  # seconds
MAX_NB_REGULAR_FILES_PER_COMMIT = 75
MAX_NB_LFS_FILES_PER_COMMIT = 150


def upload_large_folder_internal(
    api: "HfApi",
    repo_id: str,
    folder_path: Union[str, Path],
    *,
    repo_type: str,  # Repo type is required!
    revision: Optional[str] = None,
    private: Optional[bool] = None,
    allow_patterns: Optional[Union[List[str], str]] = None,
    ignore_patterns: Optional[Union[List[str], str]] = None,
    num_workers: Optional[int] = None,
    print_report: bool = True,
    print_report_every: int = 60,
):
    """Upload a large folder to the Hub in the most resilient way possible.

    See [`HfApi.upload_large_folder`] for the full documentation.
    """
    # 1. Check args and setup
    if repo_type is None:
        raise ValueError(
            "For large uploads, `repo_type` is explicitly required. Please set it to `model`, `dataset` or `space`."
            " If you are using the CLI, pass it as `--repo-type=model`."
        )
    if repo_type not in REPO_TYPES:
        raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
    if revision is None:
        revision = DEFAULT_REVISION

    folder_path = Path(folder_path).expanduser().resolve()
    if not folder_path.is_dir():
        raise ValueError(f"Provided path: '{folder_path}' is not a directory")

    if ignore_patterns is None:
        ignore_patterns = []
    elif isinstance(ignore_patterns, str):
        ignore_patterns = [ignore_patterns]
    ignore_patterns += DEFAULT_IGNORE_PATTERNS

    if num_workers is None:
        nb_cores = os.cpu_count() or 1
        num_workers = max(nb_cores - 2, 2)  # Use all but 2 cores, or at least 2 cores

    # 2. Create repo if missing
    repo_url = api.create_repo(repo_id=repo_id, repo_type=repo_type, private=private, exist_ok=True)
    logger.info(f"Repo created: {repo_url}")
    repo_id = repo_url.repo_id

    # 3. List files to upload
    filtered_paths_list = filter_repo_objects(
        (path.relative_to(folder_path).as_posix() for path in folder_path.glob("**/*") if path.is_file()),
        allow_patterns=allow_patterns,
        ignore_patterns=ignore_patterns,
    )
    paths_list = [get_local_upload_paths(folder_path, relpath) for relpath in filtered_paths_list]
    logger.info(f"Found {len(paths_list)} candidate files to upload")

    # Read metadata for each file
    items = [
        (paths, read_upload_metadata(folder_path, paths.path_in_repo))
        for paths in tqdm(paths_list, desc="Recovering from metadata files")
    ]

    # 4. Start workers
    status = LargeUploadStatus(items)
    threads = [
        threading.Thread(
            target=_worker_job,
            kwargs={
                "status": status,
                "api": api,
                "repo_id": repo_id,
                "repo_type": repo_type,
                "revision": revision,
            },
        )
        for _ in range(num_workers)
    ]

    for thread in threads:
        thread.start()

    # 5. Print regular reports
    if print_report:
        print("\n\n" + status.current_report())
    last_report_ts = time.time()
    while True:
        time.sleep(1)
        if time.time() - last_report_ts >= print_report_every:
            if print_report:
                _print_overwrite(status.current_report())
            last_report_ts = time.time()
        if status.is_done():
            logger.info("Is done: exiting main loop")
            break

    for thread in threads:
        thread.join()

    logger.info(status.current_report())
    logger.info("Upload is complete!")
####################
# Logic to manage workers and synchronize tasks
####################


class WorkerJob(enum.Enum):
    SHA256 = enum.auto()
    GET_UPLOAD_MODE = enum.auto()
    PREUPLOAD_LFS = enum.auto()
    COMMIT = enum.auto()
    WAIT = enum.auto()  # if no tasks are available but we don't want to exit


JOB_ITEM_T = Tuple[LocalUploadFilePaths, LocalUploadFileMetadata]


class LargeUploadStatus:
    """Contains information, queues and tasks for a large upload process."""

    def __init__(self, items: List[JOB_ITEM_T]):
        self.items = items
        self.queue_sha256: "queue.Queue[JOB_ITEM_T]" = queue.Queue()
        self.queue_get_upload_mode: "queue.Queue[JOB_ITEM_T]" = queue.Queue()
        self.queue_preupload_lfs: "queue.Queue[JOB_ITEM_T]" = queue.Queue()
        self.queue_commit: "queue.Queue[JOB_ITEM_T]" = queue.Queue()
        self.lock = Lock()

        self.nb_workers_sha256: int = 0
        self.nb_workers_get_upload_mode: int = 0
        self.nb_workers_preupload_lfs: int = 0
        self.nb_workers_commit: int = 0
        self.nb_workers_waiting: int = 0
        self.last_commit_attempt: Optional[float] = None

        self._started_at = datetime.now()

        # Setup queues
        for item in self.items:
            paths, metadata = item
            if metadata.sha256 is None:
                self.queue_sha256.put(item)
            elif metadata.upload_mode is None:
                self.queue_get_upload_mode.put(item)
            elif metadata.upload_mode == "lfs" and not metadata.is_uploaded:
                self.queue_preupload_lfs.put(item)
            elif not metadata.is_committed:
                self.queue_commit.put(item)
            else:
                logger.debug(f"Skipping file {paths.path_in_repo} (already uploaded and committed)")

    def current_report(self) -> str:
        """Generate a report of the current status of the large upload."""
        nb_hashed = 0
        size_hashed = 0
        nb_preuploaded = 0
        nb_lfs = 0
        nb_lfs_unsure = 0
        size_preuploaded = 0
        nb_committed = 0
        size_committed = 0
        total_size = 0
        ignored_files = 0
        total_files = 0

        with self.lock:
            for _, metadata in self.items:
                if metadata.should_ignore:
                    ignored_files += 1
                    continue
                total_size += metadata.size
                total_files += 1
                if metadata.sha256 is not None:
                    nb_hashed += 1
                    size_hashed += metadata.size
                if metadata.upload_mode == "lfs":
                    nb_lfs += 1
                if metadata.upload_mode is None:
                    nb_lfs_unsure += 1
                if metadata.is_uploaded:
                    nb_preuploaded += 1
                    size_preuploaded += metadata.size
                if metadata.is_committed:
                    nb_committed += 1
                    size_committed += metadata.size
        total_size_str = _format_size(total_size)

        now = datetime.now()
        now_str = now.strftime("%Y-%m-%d %H:%M:%S")
        elapsed = now - self._started_at
        elapsed_str = str(elapsed).split(".")[0]  # remove milliseconds

        message = "\n" + "-" * 10
        message += f" {now_str} ({elapsed_str}) "
        message += "-" * 10 + "\n"

        message += "Files: "
        message += f"hashed {nb_hashed}/{total_files} ({_format_size(size_hashed)}/{total_size_str}) | "
        message += f"pre-uploaded: {nb_preuploaded}/{nb_lfs} ({_format_size(size_preuploaded)}/{total_size_str})"
        if nb_lfs_unsure > 0:
            message += f" (+{nb_lfs_unsure} unsure)"
        message += f" | committed: {nb_committed}/{total_files} ({_format_size(size_committed)}/{total_size_str})"
        message += f" | ignored: {ignored_files}\n"

        message += "Workers: "
        message += f"hashing: {self.nb_workers_sha256} | "
        message += f"get upload mode: {self.nb_workers_get_upload_mode} | "
        message += f"pre-uploading: {self.nb_workers_preupload_lfs} | "
        message += f"committing: {self.nb_workers_commit} | "
        message += f"waiting: {self.nb_workers_waiting}\n"
        message += "-" * 51

        return message

    def is_done(self) -> bool:
        with self.lock:
            return all(metadata.is_committed or metadata.should_ignore for _, metadata in self.items)
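The queue routing in `__init__` above resumes each file at its last completed step. A toy sketch of the same routing, with a hypothetical stand-in for `LocalUploadFileMetadata`:

```py
import queue
from dataclasses import dataclass
from typing import Optional

@dataclass
class FakeMeta:
    # Hypothetical stand-in for LocalUploadFileMetadata, for illustration only
    sha256: Optional[str] = None
    upload_mode: Optional[str] = None
    is_uploaded: bool = False
    is_committed: bool = False

queue_sha256: "queue.Queue" = queue.Queue()
queue_get_upload_mode: "queue.Queue" = queue.Queue()

item = ("weights.bin", FakeMeta())  # fresh file: no sha256 yet
if item[1].sha256 is None:
    queue_sha256.put(item)  # must be hashed before anything else
else:
    queue_get_upload_mode.put(item)
assert queue_sha256.qsize() == 1
```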
def _worker_job(
    status: LargeUploadStatus,
    api: "HfApi",
    repo_id: str,
    repo_type: str,
    revision: str,
):
    """
    Main process for a worker. The worker will perform tasks based on the priority list until all files are uploaded
    and committed. If no tasks are available, the worker will wait for 10 seconds before checking again.

    If a task fails for any reason, the item(s) are put back in the queue for another worker to pick up.

    Read `upload_large_folder` docstring for more information on how tasks are prioritized.
    """
    while True:
        # Determine next task
        next_job: Optional[Tuple[WorkerJob, List[JOB_ITEM_T]]] = _determine_next_job(status)
        if next_job is None:
            return
        job, items = next_job

        # Perform task
        if job == WorkerJob.SHA256:
            item = items[0]  # single item
            try:
                _compute_sha256(item)
                status.queue_get_upload_mode.put(item)
            except KeyboardInterrupt:
                raise
            except Exception as e:
                logger.error(f"Failed to compute sha256: {e}")
                logger.warning(traceback.format_exc())
                status.queue_sha256.put(item)

            with status.lock:
                status.nb_workers_sha256 -= 1

        elif job == WorkerJob.GET_UPLOAD_MODE:
            try:
                _get_upload_mode(items, api=api, repo_id=repo_id, repo_type=repo_type, revision=revision)
            except KeyboardInterrupt:
                raise
            except Exception as e:
                logger.error(f"Failed to get upload mode: {e}")
                logger.warning(traceback.format_exc())

            # Items are either:
            # - dropped (if should_ignore)
            # - put in LFS queue (if LFS)
            # - put in commit queue (if regular)
            # - or put back (if error occurred).
            for item in items:
                _, metadata = item
                if metadata.should_ignore:
                    continue
                if metadata.upload_mode == "lfs":
                    status.queue_preupload_lfs.put(item)
                elif metadata.upload_mode == "regular":
                    status.queue_commit.put(item)
                else:
                    status.queue_get_upload_mode.put(item)

            with status.lock:
                status.nb_workers_get_upload_mode -= 1

        elif job == WorkerJob.PREUPLOAD_LFS:
            item = items[0]  # single item
            try:
                _preupload_lfs(item, api=api, repo_id=repo_id, repo_type=repo_type, revision=revision)
                status.queue_commit.put(item)
            except KeyboardInterrupt:
                raise
            except Exception as e:
                logger.error(f"Failed to preupload LFS: {e}")
                logger.warning(traceback.format_exc())
                status.queue_preupload_lfs.put(item)

            with status.lock:
                status.nb_workers_preupload_lfs -= 1

        elif job == WorkerJob.COMMIT:
            try:
                _commit(items, api=api, repo_id=repo_id, repo_type=repo_type, revision=revision)
            except KeyboardInterrupt:
                raise
            except Exception as e:
                logger.error(f"Failed to commit: {e}")
                logger.warning(traceback.format_exc())
                for item in items:
                    status.queue_commit.put(item)
            with status.lock:
                status.last_commit_attempt = time.time()
                status.nb_workers_commit -= 1

        elif job == WorkerJob.WAIT:
            time.sleep(WAITING_TIME_IF_NO_TASKS)
            with status.lock:
                status.nb_workers_waiting -= 1
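Each branch above follows the same requeue-on-failure pattern: on any error the item goes back into its queue for a retry. A minimal standalone sketch of that pattern:

```py
import queue

tasks: "queue.Queue[str]" = queue.Queue()
tasks.put("file-1")

def flaky_upload(task: str) -> None:
    raise RuntimeError("transient network failure")  # simulated

task = tasks.get()
try:
    flaky_upload(task)
except Exception:
    tasks.put(task)  # requeue so any worker can retry the item later
assert tasks.qsize() == 1
```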
def _determine_next_job(status: LargeUploadStatus) -> Optional[Tuple[WorkerJob, List[JOB_ITEM_T]]]:
    with status.lock:
        # 1. Commit if more than 5 minutes since last commit attempt (and at least 1 file)
        if (
            status.nb_workers_commit == 0
            and status.queue_commit.qsize() > 0
            and status.last_commit_attempt is not None
            and time.time() - status.last_commit_attempt > 5 * 60
        ):
            status.nb_workers_commit += 1
            logger.debug("Job: commit (more than 5 minutes since last commit attempt)")
            return (WorkerJob.COMMIT, _get_items_to_commit(status.queue_commit))

        # 2. Commit if at least 150 files are ready to commit
        elif status.nb_workers_commit == 0 and status.queue_commit.qsize() >= 150:
            status.nb_workers_commit += 1
            logger.debug("Job: commit (>=150 files ready)")
            return (WorkerJob.COMMIT, _get_items_to_commit(status.queue_commit))

        # 3. Get upload mode if at least 10 files
        elif status.queue_get_upload_mode.qsize() >= 10:
            status.nb_workers_get_upload_mode += 1
            logger.debug("Job: get upload mode (>=10 files ready)")
            return (WorkerJob.GET_UPLOAD_MODE, _get_n(status.queue_get_upload_mode, 50))

        # 4. Preupload LFS file if at least 1 file and no worker is preuploading LFS
        elif status.queue_preupload_lfs.qsize() > 0 and status.nb_workers_preupload_lfs == 0:
            status.nb_workers_preupload_lfs += 1
            logger.debug("Job: preupload LFS (no other worker preuploading LFS)")
            return (WorkerJob.PREUPLOAD_LFS, _get_one(status.queue_preupload_lfs))

        # 5. Compute sha256 if at least 1 file and no worker is computing sha256
        elif status.queue_sha256.qsize() > 0 and status.nb_workers_sha256 == 0:
            status.nb_workers_sha256 += 1
            logger.debug("Job: sha256 (no other worker computing sha256)")
            return (WorkerJob.SHA256, _get_one(status.queue_sha256))

        # 6. Get upload mode if at least 1 file and no worker is getting upload mode
        elif status.queue_get_upload_mode.qsize() > 0 and status.nb_workers_get_upload_mode == 0:
            status.nb_workers_get_upload_mode += 1
            logger.debug("Job: get upload mode (no other worker getting upload mode)")
            return (WorkerJob.GET_UPLOAD_MODE, _get_n(status.queue_get_upload_mode, 50))

        # 7. Preupload LFS file if at least 1 file
        #    Skip if hf_transfer is enabled and there is already a worker preuploading LFS
        elif status.queue_preupload_lfs.qsize() > 0 and (
            status.nb_workers_preupload_lfs == 0 or not constants.HF_HUB_ENABLE_HF_TRANSFER
        ):
            status.nb_workers_preupload_lfs += 1
            logger.debug("Job: preupload LFS")
            return (WorkerJob.PREUPLOAD_LFS, _get_one(status.queue_preupload_lfs))

        # 8. Compute sha256 if at least 1 file
        elif status.queue_sha256.qsize() > 0:
            status.nb_workers_sha256 += 1
            logger.debug("Job: sha256")
            return (WorkerJob.SHA256, _get_one(status.queue_sha256))

        # 9. Get upload mode if at least 1 file
        elif status.queue_get_upload_mode.qsize() > 0:
            status.nb_workers_get_upload_mode += 1
            logger.debug("Job: get upload mode")
            return (WorkerJob.GET_UPLOAD_MODE, _get_n(status.queue_get_upload_mode, 50))

        # 10. Commit if at least 1 file and 1 min since last commit attempt
        elif (
            status.nb_workers_commit == 0
            and status.queue_commit.qsize() > 0
            and status.last_commit_attempt is not None
            and time.time() - status.last_commit_attempt > 1 * 60
        ):
            status.nb_workers_commit += 1
            logger.debug("Job: commit (1 min since last commit attempt)")
            return (WorkerJob.COMMIT, _get_items_to_commit(status.queue_commit))

        # 11. Commit if at least 1 file, all other queues are empty, and all workers are waiting
        #     e.g. when it's the last commit
        elif (
            status.nb_workers_commit == 0
            and status.queue_commit.qsize() > 0
            and status.queue_sha256.qsize() == 0
            and status.queue_get_upload_mode.qsize() == 0
            and status.queue_preupload_lfs.qsize() == 0
            and status.nb_workers_sha256 == 0
            and status.nb_workers_get_upload_mode == 0
            and status.nb_workers_preupload_lfs == 0
        ):
            status.nb_workers_commit += 1
            logger.debug("Job: commit")
            return (WorkerJob.COMMIT, _get_items_to_commit(status.queue_commit))

        # 12. If all files have been processed, exit
        elif all(metadata.is_committed or metadata.should_ignore for _, metadata in status.items):
            logger.info("All files have been processed! Exiting worker.")
            return None

        # 13. If no task is available, wait
        else:
            status.nb_workers_waiting += 1
            logger.debug(f"No task available, waiting... ({WAITING_TIME_IF_NO_TASKS}s)")
            return (WorkerJob.WAIT, [])
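The `elif` chain above is a strict priority list: the first matching rule wins. A condensed toy version of that dispatch style (thresholds borrowed from rules 2 and 8):

```py
def pick_job(nb_ready_to_commit: int, nb_to_hash: int) -> str:
    # First matching rule wins, exactly like the elif chain above
    if nb_ready_to_commit >= 150:
        return "commit"
    elif nb_to_hash > 0:
        return "sha256"
    else:
        return "wait"

assert pick_job(200, 5) == "commit"
assert pick_job(0, 5) == "sha256"
assert pick_job(0, 0) == "wait"
```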
####################
# Atomic jobs (sha256, get_upload_mode, preupload_lfs, commit)
####################


def _compute_sha256(item: JOB_ITEM_T) -> None:
    """Compute sha256 of a file and save it in metadata."""
    paths, metadata = item
    if metadata.sha256 is None:
        with paths.file_path.open("rb") as f:
            metadata.sha256 = sha_fileobj(f).hex()
    metadata.save(paths)


def _get_upload_mode(items: List[JOB_ITEM_T], api: "HfApi", repo_id: str, repo_type: str, revision: str) -> None:
    """Get upload mode for each file and update metadata.

    Also receives info on whether the file should be ignored.
    """
    additions = [_build_hacky_operation(item) for item in items]
    _fetch_upload_modes(
        additions=additions,
        repo_type=repo_type,
        repo_id=repo_id,
        headers=api._build_hf_headers(),
        revision=quote(revision, safe=""),
    )
    for item, addition in zip(items, additions):
        paths, metadata = item
        metadata.upload_mode = addition._upload_mode
        metadata.should_ignore = addition._should_ignore
        metadata.save(paths)


def _preupload_lfs(item: JOB_ITEM_T, api: "HfApi", repo_id: str, repo_type: str, revision: str) -> None:
    """Preupload LFS file and update metadata."""
    paths, metadata = item
    addition = _build_hacky_operation(item)
    api.preupload_lfs_files(
        repo_id=repo_id,
        repo_type=repo_type,
        revision=revision,
        additions=[addition],
    )

    metadata.is_uploaded = True
    metadata.save(paths)


def _commit(items: List[JOB_ITEM_T], api: "HfApi", repo_id: str, repo_type: str, revision: str) -> None:
    """Commit files to the repo."""
    additions = [_build_hacky_operation(item) for item in items]
    api.create_commit(
        repo_id=repo_id,
        repo_type=repo_type,
        revision=revision,
        operations=additions,
        commit_message="Add files using upload-large-folder tool",
    )
    for paths, metadata in items:
        metadata.is_committed = True
        metadata.save(paths)
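`sha_fileobj` is internal to `huggingface_hub`; assuming it computes a plain streamed SHA-256, the hex digest stored in `metadata.sha256` corresponds to this standard-library computation:

```py
import hashlib

def sha256_of_file(path: str) -> str:
    # Stream the file in 1 MiB chunks so large files don't need to fit in memory
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b""):
            h.update(chunk)
    return h.hexdigest()
```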
####################
# Hacks with CommitOperationAdd to bypass checks/sha256 calculation
####################


class HackyCommitOperationAdd(CommitOperationAdd):
    def __post_init__(self) -> None:
        if isinstance(self.path_or_fileobj, Path):
            self.path_or_fileobj = str(self.path_or_fileobj)


def _build_hacky_operation(item: JOB_ITEM_T) -> HackyCommitOperationAdd:
    paths, metadata = item
    operation = HackyCommitOperationAdd(path_in_repo=paths.path_in_repo, path_or_fileobj=paths.file_path)
    with paths.file_path.open("rb") as file:
        sample = file.peek(512)[:512]
    if metadata.sha256 is None:
        raise ValueError("sha256 must have been computed by now!")
    operation.upload_info = UploadInfo(sha256=bytes.fromhex(metadata.sha256), size=metadata.size, sample=sample)
    return operation


####################
# Misc helpers
####################


def _get_one(queue: "queue.Queue[JOB_ITEM_T]") -> List[JOB_ITEM_T]:
    return [queue.get()]


def _get_n(queue: "queue.Queue[JOB_ITEM_T]", n: int) -> List[JOB_ITEM_T]:
    return [queue.get() for _ in range(min(queue.qsize(), n))]


def _get_items_to_commit(queue: "queue.Queue[JOB_ITEM_T]") -> List[JOB_ITEM_T]:
    """Special case for commit job: the number of items to commit depends on the type of files."""
    # Can take at most 75 regular files and/or 150 LFS files in a single commit
    items: List[JOB_ITEM_T] = []
    nb_lfs, nb_regular = 0, 0
    while True:
        # If empty queue => commit everything
        if queue.qsize() == 0:
            return items

        # If we have enough items => commit them
        if nb_lfs >= MAX_NB_LFS_FILES_PER_COMMIT or nb_regular >= MAX_NB_REGULAR_FILES_PER_COMMIT:
            return items

        # Else, get a new item and increase counter
        item = queue.get()
        items.append(item)
        _, metadata = item
        if metadata.upload_mode == "lfs":
            nb_lfs += 1
        else:
            nb_regular += 1


def _print_overwrite(report: str) -> None:
    """Print a report, overwriting the previous lines.

    Since tqdm is using `sys.stderr` to (re-)write progress bars, we need to use `sys.stdout`
    to print the report.

    Note: works well only if no other process is writing to `sys.stdout`!
    """
    report += "\n"
    # Get terminal width
    terminal_width = shutil.get_terminal_size().columns

    # Count number of lines that should be cleared
    nb_lines = sum(len(line) // terminal_width + 1 for line in report.splitlines())

    # Clear previous lines based on the number of lines in the report
    for _ in range(nb_lines):
        sys.stdout.write("\r\033[K")  # Clear line
        sys.stdout.write("\033[F")  # Move cursor up one line

    # Print the new report, filling remaining space with whitespace
    sys.stdout.write(report)
    sys.stdout.write(" " * (terminal_width - len(report.splitlines()[-1])))
    sys.stdout.flush()
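`_print_overwrite` relies on two ANSI escape sequences. A minimal standalone demo of the same cursor-up-and-clear trick:

```py
import sys
import time

sys.stdout.write("status: 1/3 files\n")
sys.stdout.flush()
time.sleep(0.2)
sys.stdout.write("\033[F")    # move the cursor up one line
sys.stdout.write("\r\033[K")  # clear that line
sys.stdout.write("status: 2/3 files\n")
sys.stdout.flush()
```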
parrot/lib/python3.10/site-packages/huggingface_hub/_webhooks_payload.py
ADDED
@@ -0,0 +1,137 @@
# coding=utf-8
# Copyright 2023-present, the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains data structures to parse the webhooks payload."""

from typing import List, Literal, Optional

from .utils import is_pydantic_available


if is_pydantic_available():
    from pydantic import BaseModel
else:
    # Define a dummy BaseModel to avoid import errors when pydantic is not installed
    # Import error will be raised when trying to use the class

    class BaseModel:  # type: ignore [no-redef]
        def __init__(self, *args, **kwargs) -> None:
            raise ImportError(
                "You must have `pydantic` installed to use `WebhookPayload`. This is an optional dependency that"
                " should be installed separately. Please run `pip install --upgrade pydantic` and retry."
            )


# This is an adaptation of the ReportV3 interface implemented in moon-landing. V0, V1 and V2 have been ignored as they
# are no longer in use. To keep in sync when the format is updated in
# https://github.com/huggingface/moon-landing/blob/main/server/lib/HFWebhooks.ts (internal link).


WebhookEvent_T = Literal[
    "create",
    "delete",
    "move",
    "update",
]
RepoChangeEvent_T = Literal[
    "add",
    "move",
    "remove",
    "update",
]
RepoType_T = Literal[
    "dataset",
    "model",
    "space",
]
DiscussionStatus_T = Literal[
    "closed",
    "draft",
    "open",
    "merged",
]
SupportedWebhookVersion = Literal[3]


class ObjectId(BaseModel):
    id: str


class WebhookPayloadUrl(BaseModel):
    web: str
    api: Optional[str] = None


class WebhookPayloadMovedTo(BaseModel):
    name: str
    owner: ObjectId


class WebhookPayloadWebhook(ObjectId):
    version: SupportedWebhookVersion


class WebhookPayloadEvent(BaseModel):
    action: WebhookEvent_T
    scope: str


class WebhookPayloadDiscussionChanges(BaseModel):
    base: str
    mergeCommitId: Optional[str] = None


class WebhookPayloadComment(ObjectId):
    author: ObjectId
    hidden: bool
    content: Optional[str] = None
    url: WebhookPayloadUrl


class WebhookPayloadDiscussion(ObjectId):
    num: int
    author: ObjectId
    url: WebhookPayloadUrl
    title: str
    isPullRequest: bool
    status: DiscussionStatus_T
    changes: Optional[WebhookPayloadDiscussionChanges] = None
    pinned: Optional[bool] = None


class WebhookPayloadRepo(ObjectId):
    owner: ObjectId
    head_sha: Optional[str] = None
    name: str
    private: bool
    subdomain: Optional[str] = None
    tags: Optional[List[str]] = None
    type: Literal["dataset", "model", "space"]
    url: WebhookPayloadUrl


class WebhookPayloadUpdatedRef(BaseModel):
    ref: str
    oldSha: Optional[str] = None
    newSha: Optional[str] = None


class WebhookPayload(BaseModel):
    event: WebhookPayloadEvent
    repo: WebhookPayloadRepo
    discussion: Optional[WebhookPayloadDiscussion] = None
    comment: Optional[WebhookPayloadComment] = None
    webhook: WebhookPayloadWebhook
    movedTo: Optional[WebhookPayloadMovedTo] = None
    updatedRefs: Optional[List[WebhookPayloadUpdatedRef]] = None
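Since these are ordinary pydantic models, a webhook endpoint can validate an incoming JSON body directly against `WebhookPayload`. A minimal sketch, assuming pydantic v1's `parse_obj` (on pydantic v2, `model_validate` is the equivalent) and a hypothetical, truncated payload:

from huggingface_hub._webhooks_payload import WebhookPayload

# Hypothetical payload dict, shaped after the ReportV3 format parsed above.
body = {
    "event": {"action": "update", "scope": "repo.content"},
    "repo": {
        "id": "64f0...",              # illustrative, truncated ObjectId
        "owner": {"id": "5e9f..."},   # illustrative, truncated ObjectId
        "name": "user/my-model",
        "private": False,
        "type": "model",
        "url": {"web": "https://huggingface.co/user/my-model"},
    },
    "webhook": {"id": "63b1...", "version": 3},
}

payload = WebhookPayload.parse_obj(body)
print(payload.repo.name, payload.event.action)  # user/my-model update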
parrot/lib/python3.10/site-packages/huggingface_hub/community.py
ADDED
@@ -0,0 +1,355 @@
"""
Data structures to interact with Discussions and Pull Requests on the Hub.

See [the Discussions and Pull Requests guide](https://huggingface.co/docs/hub/repositories-pull-requests-discussions)
for more information on Pull Requests, Discussions, and the community tab.
"""

from dataclasses import dataclass
from datetime import datetime
from typing import List, Literal, Optional, Union

from . import constants
from .utils import parse_datetime


DiscussionStatus = Literal["open", "closed", "merged", "draft"]


@dataclass
class Discussion:
    """
    A Discussion or Pull Request on the Hub.

    This dataclass is not intended to be instantiated directly.

    Attributes:
        title (`str`):
            The title of the Discussion / Pull Request.
        status (`str`):
            The status of the Discussion / Pull Request.
            It must be one of:
                * `"open"`
                * `"closed"`
                * `"merged"` (only for Pull Requests)
                * `"draft"` (only for Pull Requests)
        num (`int`):
            The number of the Discussion / Pull Request.
        repo_id (`str`):
            The id (`"{namespace}/{repo_name}"`) of the repo on which
            the Discussion / Pull Request was open.
        repo_type (`str`):
            The type of the repo on which the Discussion / Pull Request was open.
            Possible values are: `"model"`, `"dataset"`, `"space"`.
        author (`str`):
            The username of the Discussion / Pull Request author.
            Can be `"deleted"` if the user has been deleted since.
        is_pull_request (`bool`):
            Whether or not this is a Pull Request.
        created_at (`datetime`):
            The `datetime` of creation of the Discussion / Pull Request.
        endpoint (`str`):
            Endpoint of the Hub. Default is https://huggingface.co.
        git_reference (`str`, *optional*):
            (property) Git reference to which changes can be pushed if this is a Pull Request, `None` otherwise.
        url (`str`):
            (property) URL of the discussion on the Hub.
    """

    title: str
    status: DiscussionStatus
    num: int
    repo_id: str
    repo_type: str
    author: str
    is_pull_request: bool
    created_at: datetime
    endpoint: str

    @property
    def git_reference(self) -> Optional[str]:
        """
        If this is a Pull Request, returns the git reference to which changes can be pushed.
        Returns `None` otherwise.
        """
        if self.is_pull_request:
            return f"refs/pr/{self.num}"
        return None

    @property
    def url(self) -> str:
        """Returns the URL of the discussion on the Hub."""
        if self.repo_type is None or self.repo_type == constants.REPO_TYPE_MODEL:
            return f"{self.endpoint}/{self.repo_id}/discussions/{self.num}"
        return f"{self.endpoint}/{self.repo_type}s/{self.repo_id}/discussions/{self.num}"


@dataclass
class DiscussionWithDetails(Discussion):
    """
    Subclass of [`Discussion`].

    Attributes:
        title (`str`):
            The title of the Discussion / Pull Request.
        status (`str`):
            The status of the Discussion / Pull Request.
            It can be one of:
                * `"open"`
                * `"closed"`
                * `"merged"` (only for Pull Requests)
                * `"draft"` (only for Pull Requests)
        num (`int`):
            The number of the Discussion / Pull Request.
        repo_id (`str`):
            The id (`"{namespace}/{repo_name}"`) of the repo on which
            the Discussion / Pull Request was open.
        repo_type (`str`):
            The type of the repo on which the Discussion / Pull Request was open.
            Possible values are: `"model"`, `"dataset"`, `"space"`.
        author (`str`):
            The username of the Discussion / Pull Request author.
            Can be `"deleted"` if the user has been deleted since.
        is_pull_request (`bool`):
            Whether or not this is a Pull Request.
        created_at (`datetime`):
            The `datetime` of creation of the Discussion / Pull Request.
        events (`list` of [`DiscussionEvent`]):
            The list of [`DiscussionEvents`] in this Discussion or Pull Request.
        conflicting_files (`Union[List[str], bool, None]`, *optional*):
            A list of conflicting files if this is a Pull Request.
            `None` if `self.is_pull_request` is `False`.
            `True` if there are conflicting files but the list can't be retrieved.
        target_branch (`str`, *optional*):
            The branch into which changes are to be merged if this is a
            Pull Request. `None` if `self.is_pull_request` is `False`.
        merge_commit_oid (`str`, *optional*):
            If this is a merged Pull Request, this is set to the OID / SHA of
            the merge commit, `None` otherwise.
        diff (`str`, *optional*):
            The git diff if this is a Pull Request, `None` otherwise.
        endpoint (`str`):
            Endpoint of the Hub. Default is https://huggingface.co.
        git_reference (`str`, *optional*):
            (property) Git reference to which changes can be pushed if this is a Pull Request, `None` otherwise.
        url (`str`):
            (property) URL of the discussion on the Hub.
    """

    events: List["DiscussionEvent"]
    conflicting_files: Union[List[str], bool, None]
    target_branch: Optional[str]
    merge_commit_oid: Optional[str]
    diff: Optional[str]


@dataclass
class DiscussionEvent:
    """
    An event in a Discussion or Pull Request.

    Use concrete classes:
        * [`DiscussionComment`]
        * [`DiscussionStatusChange`]
        * [`DiscussionCommit`]
        * [`DiscussionTitleChange`]

    Attributes:
        id (`str`):
            The ID of the event. A hexadecimal string.
        type (`str`):
            The type of the event.
        created_at (`datetime`):
            A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime)
            object holding the creation timestamp for the event.
        author (`str`):
            The username of the Discussion / Pull Request author.
            Can be `"deleted"` if the user has been deleted since.
    """

    id: str
    type: str
    created_at: datetime
    author: str

    _event: dict
    """Stores the original event data, in case we need to access it later."""


@dataclass
class DiscussionComment(DiscussionEvent):
    """A comment in a Discussion / Pull Request.

    Subclass of [`DiscussionEvent`].

    Attributes:
        id (`str`):
            The ID of the event. A hexadecimal string.
        type (`str`):
            The type of the event.
        created_at (`datetime`):
            A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime)
            object holding the creation timestamp for the event.
        author (`str`):
            The username of the Discussion / Pull Request author.
            Can be `"deleted"` if the user has been deleted since.
        content (`str`):
            The raw markdown content of the comment. Mentions, links and images are not rendered.
        edited (`bool`):
            Whether or not this comment has been edited.
        hidden (`bool`):
            Whether or not this comment has been hidden.
    """

    content: str
    edited: bool
    hidden: bool

    @property
    def rendered(self) -> str:
        """The rendered comment, as an HTML string."""
        return self._event["data"]["latest"]["html"]

    @property
    def last_edited_at(self) -> datetime:
        """The last edit time, as a `datetime` object."""
        return parse_datetime(self._event["data"]["latest"]["updatedAt"])

    @property
    def last_edited_by(self) -> str:
        """The username of the last editor; `"deleted"` if that user has since been deleted."""
        return self._event["data"]["latest"].get("author", {}).get("name", "deleted")

    @property
    def edit_history(self) -> List[dict]:
        """The edit history of the comment."""
        return self._event["data"]["history"]

    @property
    def number_of_edits(self) -> int:
        return len(self.edit_history)


@dataclass
class DiscussionStatusChange(DiscussionEvent):
    """A change of status in a Discussion / Pull Request.

    Subclass of [`DiscussionEvent`].

    Attributes:
        id (`str`):
            The ID of the event. A hexadecimal string.
        type (`str`):
            The type of the event.
        created_at (`datetime`):
            A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime)
            object holding the creation timestamp for the event.
        author (`str`):
            The username of the Discussion / Pull Request author.
            Can be `"deleted"` if the user has been deleted since.
        new_status (`str`):
            The status of the Discussion / Pull Request after the change.
            It can be one of:
                * `"open"`
                * `"closed"`
                * `"merged"` (only for Pull Requests)
    """

    new_status: str


@dataclass
class DiscussionCommit(DiscussionEvent):
    """A commit in a Pull Request.

    Subclass of [`DiscussionEvent`].

    Attributes:
        id (`str`):
            The ID of the event. A hexadecimal string.
        type (`str`):
            The type of the event.
        created_at (`datetime`):
            A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime)
            object holding the creation timestamp for the event.
        author (`str`):
            The username of the Discussion / Pull Request author.
            Can be `"deleted"` if the user has been deleted since.
        summary (`str`):
            The summary of the commit.
        oid (`str`):
            The OID / SHA of the commit, as a hexadecimal string.
    """

    summary: str
    oid: str


@dataclass
class DiscussionTitleChange(DiscussionEvent):
    """A rename event in a Discussion / Pull Request.

    Subclass of [`DiscussionEvent`].

    Attributes:
        id (`str`):
            The ID of the event. A hexadecimal string.
        type (`str`):
            The type of the event.
        created_at (`datetime`):
            A [`datetime`](https://docs.python.org/3/library/datetime.html?highlight=datetime#datetime.datetime)
            object holding the creation timestamp for the event.
        author (`str`):
            The username of the Discussion / Pull Request author.
            Can be `"deleted"` if the user has been deleted since.
        old_title (`str`):
            The previous title for the Discussion / Pull Request.
        new_title (`str`):
            The new title.
    """

    old_title: str
    new_title: str


def deserialize_event(event: dict) -> DiscussionEvent:
    """Instantiates a [`DiscussionEvent`] from a dict."""
    event_id: str = event["id"]
    event_type: str = event["type"]
    created_at = parse_datetime(event["createdAt"])

    common_args = dict(
        id=event_id,
        type=event_type,
        created_at=created_at,
        author=event.get("author", {}).get("name", "deleted"),
        _event=event,
    )

    if event_type == "comment":
        return DiscussionComment(
            **common_args,
            edited=event["data"]["edited"],
            hidden=event["data"]["hidden"],
            content=event["data"]["latest"]["raw"],
        )
    if event_type == "status-change":
        return DiscussionStatusChange(
            **common_args,
            new_status=event["data"]["status"],
        )
    if event_type == "commit":
        return DiscussionCommit(
            **common_args,
            summary=event["data"]["subject"],
            oid=event["data"]["oid"],
        )
    if event_type == "title-change":
        return DiscussionTitleChange(
            **common_args,
            old_title=event["data"]["from"],
            new_title=event["data"]["to"],
        )

    return DiscussionEvent(**common_args)
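As a quick check of the dispatch above, `deserialize_event` can be fed a hand-written event dict; the `type` field selects which concrete dataclass is instantiated. A small sketch with an assumed, illustrative payload:

from huggingface_hub.community import deserialize_event

raw = {
    "id": "deadbeef",                    # illustrative event id
    "type": "status-change",
    "createdAt": "2023-01-01T00:00:00.000Z",
    "author": {"name": "some-user"},
    "data": {"status": "closed"},
}

event = deserialize_event(raw)
print(type(event).__name__, event.new_status)  # DiscussionStatusChange closed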
parrot/lib/python3.10/site-packages/huggingface_hub/constants.py
ADDED
@@ -0,0 +1,235 @@
import os
import re
import typing
from typing import Literal, Optional, Tuple


# Possible values for env variables


ENV_VARS_TRUE_VALUES = {"1", "ON", "YES", "TRUE"}
ENV_VARS_TRUE_AND_AUTO_VALUES = ENV_VARS_TRUE_VALUES.union({"AUTO"})


def _is_true(value: Optional[str]) -> bool:
    if value is None:
        return False
    return value.upper() in ENV_VARS_TRUE_VALUES


def _as_int(value: Optional[str]) -> Optional[int]:
    if value is None:
        return None
    return int(value)


# Constants for file downloads

PYTORCH_WEIGHTS_NAME = "pytorch_model.bin"
TF2_WEIGHTS_NAME = "tf_model.h5"
TF_WEIGHTS_NAME = "model.ckpt"
FLAX_WEIGHTS_NAME = "flax_model.msgpack"
CONFIG_NAME = "config.json"
REPOCARD_NAME = "README.md"
DEFAULT_ETAG_TIMEOUT = 10
DEFAULT_DOWNLOAD_TIMEOUT = 10
DEFAULT_REQUEST_TIMEOUT = 10
DOWNLOAD_CHUNK_SIZE = 10 * 1024 * 1024
HF_TRANSFER_CONCURRENCY = 100

# Constants for serialization

PYTORCH_WEIGHTS_FILE_PATTERN = "pytorch_model{suffix}.bin"  # Unsafe pickle: use safetensors instead
SAFETENSORS_WEIGHTS_FILE_PATTERN = "model{suffix}.safetensors"
TF2_WEIGHTS_FILE_PATTERN = "tf_model{suffix}.h5"

# Constants for safetensors repos

SAFETENSORS_SINGLE_FILE = "model.safetensors"
SAFETENSORS_INDEX_FILE = "model.safetensors.index.json"
SAFETENSORS_MAX_HEADER_LENGTH = 25_000_000

# Timeout for acquiring the file lock before logging the attempt
FILELOCK_LOG_EVERY_SECONDS = 10

# Git-related constants

DEFAULT_REVISION = "main"
REGEX_COMMIT_OID = re.compile(r"[A-Fa-f0-9]{5,40}")

HUGGINGFACE_CO_URL_HOME = "https://huggingface.co/"

_staging_mode = _is_true(os.environ.get("HUGGINGFACE_CO_STAGING"))

_HF_DEFAULT_ENDPOINT = "https://huggingface.co"
_HF_DEFAULT_STAGING_ENDPOINT = "https://hub-ci.huggingface.co"
ENDPOINT = os.getenv("HF_ENDPOINT", _HF_DEFAULT_ENDPOINT).rstrip("/")
HUGGINGFACE_CO_URL_TEMPLATE = ENDPOINT + "/{repo_id}/resolve/{revision}/{filename}"

if _staging_mode:
    ENDPOINT = _HF_DEFAULT_STAGING_ENDPOINT
    HUGGINGFACE_CO_URL_TEMPLATE = _HF_DEFAULT_STAGING_ENDPOINT + "/{repo_id}/resolve/{revision}/{filename}"

HUGGINGFACE_HEADER_X_REPO_COMMIT = "X-Repo-Commit"
HUGGINGFACE_HEADER_X_LINKED_ETAG = "X-Linked-Etag"
HUGGINGFACE_HEADER_X_LINKED_SIZE = "X-Linked-Size"

INFERENCE_ENDPOINT = os.environ.get("HF_INFERENCE_ENDPOINT", "https://api-inference.huggingface.co")

# See https://huggingface.co/docs/inference-endpoints/index
INFERENCE_ENDPOINTS_ENDPOINT = "https://api.endpoints.huggingface.cloud/v2"

# Proxy for third-party providers
INFERENCE_PROXY_TEMPLATE = "https://router.huggingface.co/{provider}"

REPO_ID_SEPARATOR = "--"
# ^ this substring is not allowed in repo_ids on hf.co
# and is the canonical one we use for serialization of repo ids elsewhere.


REPO_TYPE_DATASET = "dataset"
REPO_TYPE_SPACE = "space"
REPO_TYPE_MODEL = "model"
REPO_TYPES = [None, REPO_TYPE_MODEL, REPO_TYPE_DATASET, REPO_TYPE_SPACE]
SPACES_SDK_TYPES = ["gradio", "streamlit", "docker", "static"]

REPO_TYPES_URL_PREFIXES = {
    REPO_TYPE_DATASET: "datasets/",
    REPO_TYPE_SPACE: "spaces/",
}
REPO_TYPES_MAPPING = {
    "datasets": REPO_TYPE_DATASET,
    "spaces": REPO_TYPE_SPACE,
    "models": REPO_TYPE_MODEL,
}

DiscussionTypeFilter = Literal["all", "discussion", "pull_request"]
DISCUSSION_TYPES: Tuple[DiscussionTypeFilter, ...] = typing.get_args(DiscussionTypeFilter)
DiscussionStatusFilter = Literal["all", "open", "closed"]
DISCUSSION_STATUS: Tuple[DiscussionTypeFilter, ...] = typing.get_args(DiscussionStatusFilter)

# Webhook subscription types
WEBHOOK_DOMAIN_T = Literal["repo", "discussions"]

# default cache
default_home = os.path.join(os.path.expanduser("~"), ".cache")
HF_HOME = os.path.expanduser(
    os.getenv(
        "HF_HOME",
        os.path.join(os.getenv("XDG_CACHE_HOME", default_home), "huggingface"),
    )
)
hf_cache_home = HF_HOME  # for backward compatibility. TODO: remove this in 1.0.0

default_cache_path = os.path.join(HF_HOME, "hub")
default_assets_cache_path = os.path.join(HF_HOME, "assets")

# Legacy env variables
HUGGINGFACE_HUB_CACHE = os.getenv("HUGGINGFACE_HUB_CACHE", default_cache_path)
HUGGINGFACE_ASSETS_CACHE = os.getenv("HUGGINGFACE_ASSETS_CACHE", default_assets_cache_path)

# New env variables
HF_HUB_CACHE = os.getenv("HF_HUB_CACHE", HUGGINGFACE_HUB_CACHE)
HF_ASSETS_CACHE = os.getenv("HF_ASSETS_CACHE", HUGGINGFACE_ASSETS_CACHE)

HF_HUB_OFFLINE = _is_true(os.environ.get("HF_HUB_OFFLINE") or os.environ.get("TRANSFORMERS_OFFLINE"))

# If set, log level will be set to DEBUG and all requests made to the Hub will be logged
# as curl commands for reproducibility.
HF_DEBUG = _is_true(os.environ.get("HF_DEBUG"))

# Opt-out from telemetry requests
HF_HUB_DISABLE_TELEMETRY = (
    _is_true(os.environ.get("HF_HUB_DISABLE_TELEMETRY"))  # HF-specific env variable
    or _is_true(os.environ.get("DISABLE_TELEMETRY"))
    or _is_true(os.environ.get("DO_NOT_TRACK"))  # https://consoledonottrack.com/
)

HF_TOKEN_PATH = os.environ.get("HF_TOKEN_PATH", os.path.join(HF_HOME, "token"))
HF_STORED_TOKENS_PATH = os.path.join(os.path.dirname(HF_TOKEN_PATH), "stored_tokens")

if _staging_mode:
    # In staging mode, we use a different cache to ensure we don't mix up production and staging data or tokens
    # In practice in `huggingface_hub` tests, we monkeypatch these values with temporary directories. The following
    # lines are only used in third-party libraries tests (e.g. `transformers`, `diffusers`, etc.).
    _staging_home = os.path.join(os.path.expanduser("~"), ".cache", "huggingface_staging")
    HUGGINGFACE_HUB_CACHE = os.path.join(_staging_home, "hub")
    HF_TOKEN_PATH = os.path.join(_staging_home, "token")

# Here, `True` will disable progress bars globally without possibility of enabling them
# programmatically. `False` will enable them without possibility of disabling them.
# If environment variable is not set (None), then the user is free to enable/disable
# them programmatically.
# TL;DR: env variable has priority over code
__HF_HUB_DISABLE_PROGRESS_BARS = os.environ.get("HF_HUB_DISABLE_PROGRESS_BARS")
HF_HUB_DISABLE_PROGRESS_BARS: Optional[bool] = (
    _is_true(__HF_HUB_DISABLE_PROGRESS_BARS) if __HF_HUB_DISABLE_PROGRESS_BARS is not None else None
)

# Disable warning on machines that do not support symlinks (e.g. Windows non-developer)
HF_HUB_DISABLE_SYMLINKS_WARNING: bool = _is_true(os.environ.get("HF_HUB_DISABLE_SYMLINKS_WARNING"))

# Disable warning when using experimental features
HF_HUB_DISABLE_EXPERIMENTAL_WARNING: bool = _is_true(os.environ.get("HF_HUB_DISABLE_EXPERIMENTAL_WARNING"))

# Disable sending the cached token by default in all HTTP requests to the Hub
HF_HUB_DISABLE_IMPLICIT_TOKEN: bool = _is_true(os.environ.get("HF_HUB_DISABLE_IMPLICIT_TOKEN"))

# Enable fast-download using external dependency "hf_transfer"
# See:
# - https://pypi.org/project/hf-transfer/
# - https://github.com/huggingface/hf_transfer (private)
HF_HUB_ENABLE_HF_TRANSFER: bool = _is_true(os.environ.get("HF_HUB_ENABLE_HF_TRANSFER"))


# UNUSED
# We don't use symlinks in local dir anymore.
HF_HUB_LOCAL_DIR_AUTO_SYMLINK_THRESHOLD: int = (
    _as_int(os.environ.get("HF_HUB_LOCAL_DIR_AUTO_SYMLINK_THRESHOLD")) or 5 * 1024 * 1024
)

# Used to override the etag timeout on a system level
HF_HUB_ETAG_TIMEOUT: int = _as_int(os.environ.get("HF_HUB_ETAG_TIMEOUT")) or DEFAULT_ETAG_TIMEOUT

# Used to override the get request timeout on a system level
HF_HUB_DOWNLOAD_TIMEOUT: int = _as_int(os.environ.get("HF_HUB_DOWNLOAD_TIMEOUT")) or DEFAULT_DOWNLOAD_TIMEOUT

# Allows adding information about the requester in the user-agent (e.g. partner name)
HF_HUB_USER_AGENT_ORIGIN: Optional[str] = os.environ.get("HF_HUB_USER_AGENT_ORIGIN")

# List frameworks that are handled by the InferenceAPI service. Useful to scan endpoints and check which models are
# deployed and running. Since 95% of the models are using the top 4 frameworks listed below, we scan only those by
# default. We still keep the full list of supported frameworks in case we want to scan all of them.
MAIN_INFERENCE_API_FRAMEWORKS = [
    "diffusers",
    "sentence-transformers",
    "text-generation-inference",
    "transformers",
]

ALL_INFERENCE_API_FRAMEWORKS = MAIN_INFERENCE_API_FRAMEWORKS + [
    "adapter-transformers",
    "allennlp",
    "asteroid",
    "bertopic",
    "doctr",
    "espnet",
    "fairseq",
    "fastai",
    "fasttext",
    "flair",
    "k2",
    "keras",
    "mindspore",
    "nemo",
    "open_clip",
    "paddlenlp",
    "peft",
    "pyannote-audio",
    "sklearn",
    "spacy",
    "span-marker",
    "speechbrain",
    "stanza",
    "timm",
]
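The `_is_true` / `_as_int` helpers above drive most of the boolean and integer settings in this module, and every constant is resolved once at import time, so environment variables must be set before `huggingface_hub` is imported. A short sketch of the helpers' behavior (they are private functions, so this is for illustration only):

from huggingface_hub.constants import _as_int, _is_true

assert _is_true("yes") is True   # matched case-insensitively against ENV_VARS_TRUE_VALUES
assert _is_true("0") is False    # anything outside the set is falsy
assert _is_true(None) is False   # unset environment variable
assert _as_int("512") == 512
assert _as_int(None) is None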
parrot/lib/python3.10/site-packages/huggingface_hub/fastai_utils.py
ADDED
@@ -0,0 +1,425 @@
import json
import os
from pathlib import Path
from pickle import DEFAULT_PROTOCOL, PicklingError
from typing import Any, Dict, List, Optional, Union

from packaging import version

from huggingface_hub import constants, snapshot_download
from huggingface_hub.hf_api import HfApi
from huggingface_hub.utils import (
    SoftTemporaryDirectory,
    get_fastai_version,
    get_fastcore_version,
    get_python_version,
)

from .utils import logging, validate_hf_hub_args
from .utils._runtime import _PY_VERSION  # noqa: F401 # for backward compatibility...


logger = logging.get_logger(__name__)


def _check_fastai_fastcore_versions(
    fastai_min_version: str = "2.4",
    fastcore_min_version: str = "1.3.27",
):
    """
    Checks that the installed fastai and fastcore versions are compatible for pickle serialization.

    Args:
        fastai_min_version (`str`, *optional*):
            The minimum fastai version supported.
        fastcore_min_version (`str`, *optional*):
            The minimum fastcore version supported.

    <Tip>
    Raises the following error:

        - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
          if the fastai or fastcore libraries are not available or are of an invalid version.

    </Tip>
    """

    if (get_fastcore_version() or get_fastai_version()) == "N/A":
        raise ImportError(
            f"fastai>={fastai_min_version} and fastcore>={fastcore_min_version} are"
            f" required. Currently using fastai=={get_fastai_version()} and"
            f" fastcore=={get_fastcore_version()}."
        )

    current_fastai_version = version.Version(get_fastai_version())
    current_fastcore_version = version.Version(get_fastcore_version())

    if current_fastai_version < version.Version(fastai_min_version):
        raise ImportError(
            "`push_to_hub_fastai` and `from_pretrained_fastai` require a"
            f" fastai>={fastai_min_version} version, but you are using fastai version"
            f" {get_fastai_version()} which is incompatible. Upgrade with `pip install"
            " fastai==2.5.6`."
        )

    if current_fastcore_version < version.Version(fastcore_min_version):
        raise ImportError(
            "`push_to_hub_fastai` and `from_pretrained_fastai` require a"
            f" fastcore>={fastcore_min_version} version, but you are using fastcore"
            f" version {get_fastcore_version()} which is incompatible. Upgrade with"
            " `pip install fastcore==1.3.27`."
        )


def _check_fastai_fastcore_pyproject_versions(
    storage_folder: str,
    fastai_min_version: str = "2.4",
    fastcore_min_version: str = "1.3.27",
):
    """
    Checks that the `pyproject.toml` file in the directory `storage_folder` has fastai and fastcore versions
    that are compatible with `from_pretrained_fastai` and `push_to_hub_fastai`. If `pyproject.toml` does not exist
    or does not contain versions for fastai and fastcore, then it logs a warning.

    Args:
        storage_folder (`str`):
            Folder to look for the `pyproject.toml` file.
        fastai_min_version (`str`, *optional*):
            The minimum fastai version supported.
        fastcore_min_version (`str`, *optional*):
            The minimum fastcore version supported.

    <Tip>
    Raises the following errors:

        - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
          if the `toml` module is not installed.
        - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
          if the `pyproject.toml` indicates a lower than minimum supported version of fastai or fastcore.

    </Tip>
    """

    try:
        import toml
    except ModuleNotFoundError:
        raise ImportError(
            "`push_to_hub_fastai` and `from_pretrained_fastai` require the toml module."
            " Install it with `pip install toml`."
        )

    # Checks that a `pyproject.toml`, with `build-system` and `requires` sections, exists in the repository. If so, get a list of required packages.
    if not os.path.isfile(f"{storage_folder}/pyproject.toml"):
        logger.warning(
            "There is no `pyproject.toml` in the repository that contains the fastai"
            " `Learner`. The `pyproject.toml` would allow us to verify that your fastai"
            " and fastcore versions are compatible with those of the model you want to"
            " load."
        )
        return
    pyproject_toml = toml.load(f"{storage_folder}/pyproject.toml")

    if "build-system" not in pyproject_toml.keys():
        logger.warning(
            "There is no `build-system` section in the pyproject.toml of the repository"
            " that contains the fastai `Learner`. The `build-system` would allow us to"
            " verify that your fastai and fastcore versions are compatible with those"
            " of the model you want to load."
        )
        return
    build_system_toml = pyproject_toml["build-system"]

    if "requires" not in build_system_toml.keys():
        logger.warning(
            "There is no `requires` section in the pyproject.toml of the repository"
            " that contains the fastai `Learner`. The `requires` would allow us to"
            " verify that your fastai and fastcore versions are compatible with those"
            " of the model you want to load."
        )
        return
    package_versions = build_system_toml["requires"]

    # Extracts the fastai and fastcore versions from `pyproject.toml` if available.
    # If the package is specified but not the version (e.g. "fastai" instead of "fastai=2.4"), the default versions are the highest.
    fastai_packages = [pck for pck in package_versions if pck.startswith("fastai")]
    if len(fastai_packages) == 0:
        logger.warning("The repository does not have a fastai version specified in the `pyproject.toml`.")
    # fastai_version is an empty string if not specified
    else:
        fastai_version = str(fastai_packages[0]).partition("=")[2]
        if fastai_version != "" and version.Version(fastai_version) < version.Version(fastai_min_version):
            raise ImportError(
                "`from_pretrained_fastai` requires"
                f" fastai>={fastai_min_version} version but the model to load uses"
                f" {fastai_version} which is incompatible."
            )

    fastcore_packages = [pck for pck in package_versions if pck.startswith("fastcore")]
    if len(fastcore_packages) == 0:
        logger.warning("The repository does not have a fastcore version specified in the `pyproject.toml`.")
    # fastcore_version is an empty string if not specified
    else:
        fastcore_version = str(fastcore_packages[0]).partition("=")[2]
        if fastcore_version != "" and version.Version(fastcore_version) < version.Version(fastcore_min_version):
            raise ImportError(
                "`from_pretrained_fastai` requires"
                f" fastcore>={fastcore_min_version} version, but you are using fastcore"
                f" version {fastcore_version} which is incompatible."
            )


README_TEMPLATE = """---
tags:
- fastai
---

# Amazing!

🥳 Congratulations on hosting your fastai model on the Hugging Face Hub!

# Some next steps
1. Fill out this model card with more information (see the template below and the [documentation here](https://huggingface.co/docs/hub/model-repos))!

2. Create a demo in Gradio or Streamlit using 🤗 Spaces ([documentation here](https://huggingface.co/docs/hub/spaces)).

3. Join the fastai community on the [Fastai Discord](https://discord.com/invite/YKrxeNn)!

Greetings fellow fastlearner 🤝! Don't forget to delete this content from your model card.


---


# Model card

## Model description
More information needed

## Intended uses & limitations
More information needed

## Training and evaluation data
More information needed
"""

PYPROJECT_TEMPLATE = f"""[build-system]
requires = ["setuptools>=40.8.0", "wheel", "python={get_python_version()}", "fastai={get_fastai_version()}", "fastcore={get_fastcore_version()}"]
build-backend = "setuptools.build_meta:__legacy__"
"""


def _create_model_card(repo_dir: Path):
    """
    Creates a model card for the repository.

    Args:
        repo_dir (`Path`):
            Directory where model card is created.
    """
    readme_path = repo_dir / "README.md"

    if not readme_path.exists():
        with readme_path.open("w", encoding="utf-8") as f:
            f.write(README_TEMPLATE)


def _create_model_pyproject(repo_dir: Path):
    """
    Creates a `pyproject.toml` for the repository.

    Args:
        repo_dir (`Path`):
            Directory where `pyproject.toml` is created.
    """
    pyproject_path = repo_dir / "pyproject.toml"

    if not pyproject_path.exists():
        with pyproject_path.open("w", encoding="utf-8") as f:
            f.write(PYPROJECT_TEMPLATE)


def _save_pretrained_fastai(
    learner,
    save_directory: Union[str, Path],
    config: Optional[Dict[str, Any]] = None,
):
    """
    Saves a fastai learner to `save_directory` in pickle format using the default pickle protocol for the version of python used.

    Args:
        learner (`Learner`):
            The `fastai.Learner` you'd like to save.
        save_directory (`str` or `Path`):
            Specific directory in which you want to save the fastai learner.
        config (`dict`, *optional*):
            Configuration object. Will be uploaded as a .json file. Example: 'https://huggingface.co/espejelomar/fastai-pet-breeds-classification/blob/main/config.json'.

    <Tip>

    Raises the following error:

        - [`RuntimeError`](https://docs.python.org/3/library/exceptions.html#RuntimeError)
          if the config file provided is not a dictionary.

    </Tip>
    """
    _check_fastai_fastcore_versions()

    os.makedirs(save_directory, exist_ok=True)

    # if the user provides config then we update it with the fastai and fastcore versions in CONFIG_TEMPLATE.
    if config is not None:
        if not isinstance(config, dict):
            raise RuntimeError(f"Provided config should be a dict. Got: '{type(config)}'")
        path = os.path.join(save_directory, constants.CONFIG_NAME)
        with open(path, "w") as f:
            json.dump(config, f)

    _create_model_card(Path(save_directory))
    _create_model_pyproject(Path(save_directory))

    # learner.export saves the model in `self.path`.
    learner.path = Path(save_directory)
    os.makedirs(save_directory, exist_ok=True)
    try:
        learner.export(
            fname="model.pkl",
            pickle_protocol=DEFAULT_PROTOCOL,
        )
    except PicklingError:
        raise PicklingError(
            "You are using a lambda function, i.e., an anonymous function. `pickle`"
            " cannot pickle function objects and requires that all functions have"
            " names. One possible solution is to name the function."
        )


@validate_hf_hub_args
def from_pretrained_fastai(
    repo_id: str,
    revision: Optional[str] = None,
):
    """
    Load pretrained fastai model from the Hub or from a local directory.

    Args:
        repo_id (`str`):
            The location where the pickled fastai.Learner is. It can be either of the two:
                - Hosted on the Hugging Face Hub. E.g.: 'espejelomar/fastai-pet-breeds-classification' or 'distilgpt2'.
                  You can add a `revision` by appending `@` at the end of `repo_id`. E.g.: `dbmdz/bert-base-german-cased@main`.
                  Revision is the specific model version to use. Since we use a git-based system for storing models and other
                  artifacts on the Hugging Face Hub, it can be a branch name, a tag name, or a commit id.
                - Hosted locally. `repo_id` would be a directory containing the pickle and a pyproject.toml
                  indicating the fastai and fastcore versions used to build the `fastai.Learner`. E.g.: `./my_model_directory/`.
        revision (`str`, *optional*):
            Revision at which the repo's files are downloaded. See documentation of `snapshot_download`.

    Returns:
        The `fastai.Learner` model in the `repo_id` repo.
    """
    _check_fastai_fastcore_versions()

    # Load the `repo_id` repo.
    # `snapshot_download` returns the folder where the model was stored.
    # `cache_dir` will be the default '/root/.cache/huggingface/hub'
    if not os.path.isdir(repo_id):
        storage_folder = snapshot_download(
            repo_id=repo_id,
            revision=revision,
            library_name="fastai",
            library_version=get_fastai_version(),
        )
    else:
        storage_folder = repo_id

    _check_fastai_fastcore_pyproject_versions(storage_folder)

    from fastai.learner import load_learner  # type: ignore

    return load_learner(os.path.join(storage_folder, "model.pkl"))


@validate_hf_hub_args
def push_to_hub_fastai(
    learner,
    *,
    repo_id: str,
    commit_message: str = "Push FastAI model using huggingface_hub.",
    private: Optional[bool] = None,
    token: Optional[str] = None,
    config: Optional[dict] = None,
    branch: Optional[str] = None,
    create_pr: Optional[bool] = None,
    allow_patterns: Optional[Union[List[str], str]] = None,
    ignore_patterns: Optional[Union[List[str], str]] = None,
    delete_patterns: Optional[Union[List[str], str]] = None,
    api_endpoint: Optional[str] = None,
):
    """
    Upload learner checkpoint files to the Hub.

    Use `allow_patterns` and `ignore_patterns` to precisely filter which files should be pushed to the hub. Use
    `delete_patterns` to delete existing remote files in the same commit. See [`upload_folder`] reference for more
    details.

    Args:
        learner (`Learner`):
            The `fastai.Learner` you'd like to push to the Hub.
        repo_id (`str`):
            The repository id for your model in Hub in the format of "namespace/repo_name". The namespace can be your individual account or an organization to which you have write access (for example, 'stanfordnlp/stanza-de').
        commit_message (`str`, *optional*):
            Message to commit while pushing. Defaults to `"Push FastAI model using huggingface_hub."`.
        private (`bool`, *optional*):
            Whether or not the repository created should be private.
            If `None` (default), the repo will be public unless the organization's default is private.
        token (`str`, *optional*):
            The Hugging Face account token to use as HTTP bearer authorization for remote files. If `None`, the token will be asked by a prompt.
        config (`dict`, *optional*):
            Configuration object to be saved alongside the model weights.
        branch (`str`, *optional*):
            The git branch on which to push the model. This defaults to
            the default branch as specified in your repository, which
            defaults to `"main"`.
        create_pr (`boolean`, *optional*):
            Whether or not to create a Pull Request from `branch` with that commit.
            Defaults to `False`.
        api_endpoint (`str`, *optional*):
            The API endpoint to use when pushing the model to the hub.
        allow_patterns (`List[str]` or `str`, *optional*):
            If provided, only files matching at least one pattern are pushed.
        ignore_patterns (`List[str]` or `str`, *optional*):
            If provided, files matching any of the patterns are not pushed.
        delete_patterns (`List[str]` or `str`, *optional*):
            If provided, remote files matching any of the patterns will be deleted from the repo.

    Returns:
        The url of the commit of your model in the given repository.

    <Tip>

    Raises the following error:

        - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
          if the user is not logged in to the Hugging Face Hub.

    </Tip>
    """
    _check_fastai_fastcore_versions()
    api = HfApi(endpoint=api_endpoint)
    repo_id = api.create_repo(repo_id=repo_id, token=token, private=private, exist_ok=True).repo_id

    # Push the files to the repo in a single commit
    with SoftTemporaryDirectory() as tmp:
        saved_path = Path(tmp) / repo_id
        _save_pretrained_fastai(learner, saved_path, config=config)
        return api.upload_folder(
            repo_id=repo_id,
            token=token,
            folder_path=saved_path,
            commit_message=commit_message,
            revision=branch,
            create_pr=create_pr,
            allow_patterns=allow_patterns,
            ignore_patterns=ignore_patterns,
            delete_patterns=delete_patterns,
        )
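Both entry points are re-exported at the top level of `huggingface_hub`. A minimal round trip, assuming fastai and fastcore are installed and that `my-username/my-fastai-model` is a hypothetical repo you can write to:

from huggingface_hub import from_pretrained_fastai, push_to_hub_fastai

# Download and unpickle a Learner from an existing Hub repo.
learner = from_pretrained_fastai("espejelomar/fastai-pet-breeds-classification")

# Re-export it (model.pkl + README.md + pyproject.toml) to your own repo.
push_to_hub_fastai(learner, repo_id="my-username/my-fastai-model")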
parrot/lib/python3.10/site-packages/huggingface_hub/file_download.py
ADDED
|
@@ -0,0 +1,1625 @@
+import contextlib
+import copy
+import errno
+import inspect
+import os
+import re
+import shutil
+import stat
+import time
+import uuid
+import warnings
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any, BinaryIO, Dict, Literal, NoReturn, Optional, Tuple, Union
+from urllib.parse import quote, urlparse
+
+import requests
+
+from . import (
+    __version__,  # noqa: F401 # for backward compatibility
+    constants,
+)
+from ._local_folder import get_local_download_paths, read_download_metadata, write_download_metadata
+from .constants import (
+    HUGGINGFACE_CO_URL_TEMPLATE,  # noqa: F401 # for backward compatibility
+    HUGGINGFACE_HUB_CACHE,  # noqa: F401 # for backward compatibility
+)
+from .errors import (
+    EntryNotFoundError,
+    FileMetadataError,
+    GatedRepoError,
+    HfHubHTTPError,
+    LocalEntryNotFoundError,
+    RepositoryNotFoundError,
+    RevisionNotFoundError,
+)
+from .utils import (
+    OfflineModeIsEnabled,
+    SoftTemporaryDirectory,
+    WeakFileLock,
+    build_hf_headers,
+    get_fastai_version,  # noqa: F401 # for backward compatibility
+    get_fastcore_version,  # noqa: F401 # for backward compatibility
+    get_graphviz_version,  # noqa: F401 # for backward compatibility
+    get_jinja_version,  # noqa: F401 # for backward compatibility
+    get_pydot_version,  # noqa: F401 # for backward compatibility
+    get_session,
+    get_tf_version,  # noqa: F401 # for backward compatibility
+    get_torch_version,  # noqa: F401 # for backward compatibility
+    hf_raise_for_status,
+    is_fastai_available,  # noqa: F401 # for backward compatibility
+    is_fastcore_available,  # noqa: F401 # for backward compatibility
+    is_graphviz_available,  # noqa: F401 # for backward compatibility
+    is_jinja_available,  # noqa: F401 # for backward compatibility
+    is_pydot_available,  # noqa: F401 # for backward compatibility
+    is_tf_available,  # noqa: F401 # for backward compatibility
+    is_torch_available,  # noqa: F401 # for backward compatibility
+    logging,
+    reset_sessions,
+    tqdm,
+    validate_hf_hub_args,
+)
+from .utils._http import _adjust_range_header
+from .utils._runtime import _PY_VERSION  # noqa: F401 # for backward compatibility
+from .utils._typing import HTTP_METHOD_T
+from .utils.sha import sha_fileobj
+from .utils.tqdm import is_tqdm_disabled
+
+
+logger = logging.get_logger(__name__)
+
+# Return value when trying to load a file from cache but the file does not exist in the distant repo.
+_CACHED_NO_EXIST = object()
+_CACHED_NO_EXIST_T = Any
+
+# Regex to get filename from a "Content-Disposition" header for CDN-served files
+HEADER_FILENAME_PATTERN = re.compile(r'filename="(?P<filename>.*?)";')
+
+# Regex to check if the revision IS directly a commit_hash
+REGEX_COMMIT_HASH = re.compile(r"^[0-9a-f]{40}$")
+
+# Regex to check if the file etag IS a valid sha256
+REGEX_SHA256 = re.compile(r"^[0-9a-f]{64}$")
+
+_are_symlinks_supported_in_dir: Dict[str, bool] = {}
+
+
+def are_symlinks_supported(cache_dir: Union[str, Path, None] = None) -> bool:
+    """Return whether symlinks are supported on the machine.
+
+    Since symlinks support can change depending on the mounted disk, we need to check
+    on the precise cache folder. If not provided, the default HF cache directory is checked.
+
+    Args:
+        cache_dir (`str`, `Path`, *optional*):
+            Path to the folder where cached files are stored.
+
+    Returns: [bool] Whether symlinks are supported in the directory.
+    """
+    # Defaults to HF cache
+    if cache_dir is None:
+        cache_dir = constants.HF_HUB_CACHE
+    cache_dir = str(Path(cache_dir).expanduser().resolve())  # make it unique
+
+    # Check symlink compatibility only once (per cache directory) at first time use
+    if cache_dir not in _are_symlinks_supported_in_dir:
+        _are_symlinks_supported_in_dir[cache_dir] = True
+
+        os.makedirs(cache_dir, exist_ok=True)
+        with SoftTemporaryDirectory(dir=cache_dir) as tmpdir:
+            src_path = Path(tmpdir) / "dummy_file_src"
+            src_path.touch()
+            dst_path = Path(tmpdir) / "dummy_file_dst"
+
+            # Relative source path as in `_create_symlink`
+            relative_src = os.path.relpath(src_path, start=os.path.dirname(dst_path))
+            try:
+                os.symlink(relative_src, dst_path)
+            except OSError:
+                # Likely running on Windows
+                _are_symlinks_supported_in_dir[cache_dir] = False
+
+                if not constants.HF_HUB_DISABLE_SYMLINKS_WARNING:
+                    message = (
+                        "`huggingface_hub` cache-system uses symlinks by default to"
+                        " efficiently store duplicated files but your machine does not"
+                        f" support them in {cache_dir}. Caching files will still work"
+                        " but in a degraded version that might require more space on"
+                        " your disk. This warning can be disabled by setting the"
+                        " `HF_HUB_DISABLE_SYMLINKS_WARNING` environment variable. For"
+                        " more details, see"
+                        " https://huggingface.co/docs/huggingface_hub/how-to-cache#limitations."
+                    )
+                    if os.name == "nt":
+                        message += (
+                            "\nTo support symlinks on Windows, you either need to"
+                            " activate Developer Mode or to run Python as an"
+                            " administrator. In order to activate developer mode,"
+                            " see this article:"
+                            " https://docs.microsoft.com/en-us/windows/apps/get-started/enable-your-device-for-development"
+                        )
+                    warnings.warn(message)
+
+    return _are_symlinks_supported_in_dir[cache_dir]
+
+
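The helper above memoizes one symlink probe per resolved cache directory. A minimal usage sketch, assuming `huggingface_hub` is importable (the import path is the module shown in this diff):

```python
from huggingface_hub.file_download import are_symlinks_supported

# Probe the default HF cache once; the result is memoized per directory.
if are_symlinks_supported():
    print("cache will deduplicate blobs via symlinks")
else:
    print("cache will fall back to copying files (degraded mode)")
```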
+@dataclass(frozen=True)
+class HfFileMetadata:
+    """Data structure containing information about a file versioned on the Hub.
+
+    Returned by [`get_hf_file_metadata`] based on a URL.
+
+    Args:
+        commit_hash (`str`, *optional*):
+            The commit_hash related to the file.
+        etag (`str`, *optional*):
+            Etag of the file on the server.
+        location (`str`):
+            Location where to download the file. Can be a Hub url or not (CDN).
+        size (`int`, *optional*):
+            Size of the file. In case of an LFS file, contains the size of the actual
+            LFS file, not the pointer.
+    """
+
+    commit_hash: Optional[str]
+    etag: Optional[str]
+    location: str
+    size: Optional[int]
+
+
+@validate_hf_hub_args
+def hf_hub_url(
+    repo_id: str,
+    filename: str,
+    *,
+    subfolder: Optional[str] = None,
+    repo_type: Optional[str] = None,
+    revision: Optional[str] = None,
+    endpoint: Optional[str] = None,
+) -> str:
+    """Construct the URL of a file from the given information.
+
+    The resolved address can either be a huggingface.co-hosted url, or a link to
+    Cloudfront (a Content Delivery Network, or CDN) for large files which are
+    more than a few MBs.
+
+    Args:
+        repo_id (`str`):
+            A namespace (user or an organization) name and a repo name separated
+            by a `/`.
+        filename (`str`):
+            The name of the file in the repo.
+        subfolder (`str`, *optional*):
+            An optional value corresponding to a folder inside the repo.
+        repo_type (`str`, *optional*):
+            Set to `"dataset"` or `"space"` if downloading from a dataset or space,
+            `None` or `"model"` if downloading from a model. Default is `None`.
+        revision (`str`, *optional*):
+            An optional Git revision id which can be a branch name, a tag, or a
+            commit hash.
+
+    Example:
+
+    ```python
+    >>> from huggingface_hub import hf_hub_url
+
+    >>> hf_hub_url(
+    ...     repo_id="julien-c/EsperBERTo-small", filename="pytorch_model.bin"
+    ... )
+    'https://huggingface.co/julien-c/EsperBERTo-small/resolve/main/pytorch_model.bin'
+    ```
+
+    <Tip>
+
+    Notes:
+
+        Cloudfront is replicated over the globe so downloads are way faster for
+        the end user (and it also lowers our bandwidth costs).
+
+        Cloudfront aggressively caches files by default (default TTL is 24
+        hours), however this is not an issue here because we implement a
+        git-based versioning system on huggingface.co, which means that we store
+        the files on S3/Cloudfront in a content-addressable way (i.e., the file
+        name is its hash). Using content-addressable filenames means cache can't
+        ever be stale.
+
+        In terms of client-side caching from this library, we base our caching
+        on the objects' entity tag (`ETag`), which is an identifier of a
+        specific version of a resource [1]_. An object's ETag is: its git-sha1
+        if stored in git, or its sha256 if stored in git-lfs.
+
+    </Tip>
+
+    References:
+
+    - [1] https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag
+    """
+    if subfolder == "":
+        subfolder = None
+    if subfolder is not None:
+        filename = f"{subfolder}/{filename}"
+
+    if repo_type not in constants.REPO_TYPES:
+        raise ValueError("Invalid repo type")
+
+    if repo_type in constants.REPO_TYPES_URL_PREFIXES:
+        repo_id = constants.REPO_TYPES_URL_PREFIXES[repo_type] + repo_id
+
+    if revision is None:
+        revision = constants.DEFAULT_REVISION
+    url = HUGGINGFACE_CO_URL_TEMPLATE.format(
+        repo_id=repo_id, revision=quote(revision, safe=""), filename=quote(filename)
+    )
+    # Update endpoint if provided
+    if endpoint is not None and url.startswith(constants.ENDPOINT):
+        url = endpoint + url[len(constants.ENDPOINT) :]
+    return url
+
+
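A short sketch extending the docstring example above to a dataset file pinned to a tag; the repo name, filename, and tag below are hypothetical:

```python
from huggingface_hub import hf_hub_url

# Hypothetical dataset repo and file, pinned to tag "v1.0".
url = hf_hub_url(
    repo_id="username/my-dataset",
    filename="data/train.csv",
    repo_type="dataset",
    revision="v1.0",
)
# -> https://huggingface.co/datasets/username/my-dataset/resolve/v1.0/data/train.csv
print(url)
```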
+def _request_wrapper(
+    method: HTTP_METHOD_T, url: str, *, follow_relative_redirects: bool = False, **params
+) -> requests.Response:
+    """Wrapper around requests methods to follow relative redirects if `follow_relative_redirects=True` even when
+    `allow_redirects=False`.
+
+    Args:
+        method (`str`):
+            HTTP method, such as 'GET' or 'HEAD'.
+        url (`str`):
+            The URL of the resource to fetch.
+        follow_relative_redirects (`bool`, *optional*, defaults to `False`):
+            If True, relative redirection (redirection to the same site) will be resolved even when `allow_redirects`
+            kwarg is set to False. Useful when we want to follow a redirection to a renamed repository without
+            following redirection to a CDN.
+        **params (`dict`, *optional*):
+            Params to pass to `requests.request`.
+    """
+    # Recursively follow relative redirects
+    if follow_relative_redirects:
+        response = _request_wrapper(
+            method=method,
+            url=url,
+            follow_relative_redirects=False,
+            **params,
+        )
+
+        # If redirection, we redirect only relative paths.
+        # This is useful in case of a renamed repository.
+        if 300 <= response.status_code <= 399:
+            parsed_target = urlparse(response.headers["Location"])
+            if parsed_target.netloc == "":
+                # This means it is a relative 'Location' header, as allowed by RFC 7231.
+                # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
+                # We want to follow this relative redirect!
+                #
+                # Highly inspired by `resolve_redirects` from requests library.
+                # See https://github.com/psf/requests/blob/main/requests/sessions.py#L159
+                next_url = urlparse(url)._replace(path=parsed_target.path).geturl()
+                return _request_wrapper(method=method, url=next_url, follow_relative_redirects=True, **params)
+        return response
+
+    # Perform request and return if status_code is not in the retry list.
+    response = get_session().request(method=method, url=url, **params)
+    hf_raise_for_status(response)
+    return response
+
+
+def http_get(
+    url: str,
+    temp_file: BinaryIO,
+    *,
+    proxies: Optional[Dict] = None,
+    resume_size: int = 0,
+    headers: Optional[Dict[str, Any]] = None,
+    expected_size: Optional[int] = None,
+    displayed_filename: Optional[str] = None,
+    _nb_retries: int = 5,
+    _tqdm_bar: Optional[tqdm] = None,
+) -> None:
+    """
+    Download a remote file. Does not gobble up errors, and will raise errors tailored to the Hugging Face Hub.
+
+    If a ConnectionError (SSLError) or ReadTimeout happens while streaming data from the server, it is most likely a
+    transient error (network outage?). We log a warning message and try to resume the download a few times before
+    giving up. The method gives up after 5 attempts if no new data has been received from the server.
+
+    Args:
+        url (`str`):
+            The URL of the file to download.
+        temp_file (`BinaryIO`):
+            The file-like object where to save the file.
+        proxies (`dict`, *optional*):
+            Dictionary mapping protocol to the URL of the proxy passed to `requests.request`.
+        resume_size (`int`, *optional*):
+            The number of bytes already downloaded. If set to 0 (default), the whole file is downloaded. If set to a
+            positive number, the download will resume at the given position.
+        headers (`dict`, *optional*):
+            Dictionary of HTTP Headers to send with the request.
+        expected_size (`int`, *optional*):
+            The expected size of the file to download. If set, the download will raise an error if the size of the
+            received content is different from the expected one.
+        displayed_filename (`str`, *optional*):
+            The filename of the file that is being downloaded. Value is used only to display a nice progress bar. If
+            not set, the filename is guessed from the URL or the `Content-Disposition` header.
+    """
+    if expected_size is not None and resume_size == expected_size:
+        # If the file is already fully downloaded, we don't need to download it again.
+        return
+
+    hf_transfer = None
+    if constants.HF_HUB_ENABLE_HF_TRANSFER:
+        if resume_size != 0:
+            warnings.warn("'hf_transfer' does not support `resume_size`: falling back to regular download method")
+        elif proxies is not None:
+            warnings.warn("'hf_transfer' does not support `proxies`: falling back to regular download method")
+        else:
+            try:
+                import hf_transfer  # type: ignore[no-redef]
+            except ImportError:
+                raise ValueError(
+                    "Fast download using 'hf_transfer' is enabled"
+                    " (HF_HUB_ENABLE_HF_TRANSFER=1) but 'hf_transfer' package is not"
+                    " available in your environment. Try `pip install hf_transfer`."
+                )
+
+    initial_headers = headers
+    headers = copy.deepcopy(headers) or {}
+    if resume_size > 0:
+        headers["Range"] = _adjust_range_header(headers.get("Range"), resume_size)
+
+    r = _request_wrapper(
+        method="GET", url=url, stream=True, proxies=proxies, headers=headers, timeout=constants.HF_HUB_DOWNLOAD_TIMEOUT
+    )
+    hf_raise_for_status(r)
+    content_length = r.headers.get("Content-Length")
+
+    # NOTE: 'total' is the total number of bytes to download, not the number of bytes in the file.
+    # If the file is compressed, the number of bytes in the saved file will be higher than 'total'.
+    total = resume_size + int(content_length) if content_length is not None else None
+
+    if displayed_filename is None:
+        displayed_filename = url
+        content_disposition = r.headers.get("Content-Disposition")
+        if content_disposition is not None:
+            match = HEADER_FILENAME_PATTERN.search(content_disposition)
+            if match is not None:
+                # Means file is on CDN
+                displayed_filename = match.groupdict()["filename"]
+
+    # Truncate filename if too long to display
+    if len(displayed_filename) > 40:
+        displayed_filename = f"(…){displayed_filename[-40:]}"
+
+    consistency_error_message = (
+        f"Consistency check failed: file should be of size {expected_size} but has size"
+        f" {{actual_size}} ({displayed_filename}).\nThis is usually due to network issues while downloading the file."
+        " Please retry with `force_download=True`."
+    )
+
+    # Stream file to buffer
+    progress_cm: tqdm = (
+        tqdm(  # type: ignore[assignment]
+            unit="B",
+            unit_scale=True,
+            total=total,
+            initial=resume_size,
+            desc=displayed_filename,
+            disable=is_tqdm_disabled(logger.getEffectiveLevel()),
+            name="huggingface_hub.http_get",
+        )
+        if _tqdm_bar is None
+        else contextlib.nullcontext(_tqdm_bar)
+        # ^ `contextlib.nullcontext` mimics a context manager that does nothing
+        # Makes it easier to use the same code path for both cases but in the latter
+        # case, the progress bar is not closed when exiting the context manager.
+    )
+
+    with progress_cm as progress:
+        if hf_transfer and total is not None and total > 5 * constants.DOWNLOAD_CHUNK_SIZE:
+            supports_callback = "callback" in inspect.signature(hf_transfer.download).parameters
+            if not supports_callback:
+                warnings.warn(
+                    "You are using an outdated version of `hf_transfer`. "
+                    "Consider upgrading to latest version to enable progress bars "
+                    "using `pip install -U hf_transfer`."
+                )
+            try:
+                hf_transfer.download(
+                    url=url,
+                    filename=temp_file.name,
+                    max_files=constants.HF_TRANSFER_CONCURRENCY,
+                    chunk_size=constants.DOWNLOAD_CHUNK_SIZE,
+                    headers=headers,
+                    parallel_failures=3,
+                    max_retries=5,
+                    **({"callback": progress.update} if supports_callback else {}),
+                )
+            except Exception as e:
+                raise RuntimeError(
+                    "An error occurred while downloading using `hf_transfer`. Consider"
+                    " disabling HF_HUB_ENABLE_HF_TRANSFER for better error handling."
+                ) from e
+            if not supports_callback:
+                progress.update(total)
+            if expected_size is not None and expected_size != os.path.getsize(temp_file.name):
+                raise EnvironmentError(
+                    consistency_error_message.format(
+                        actual_size=os.path.getsize(temp_file.name),
+                    )
+                )
+            return
+        new_resume_size = resume_size
+        try:
+            for chunk in r.iter_content(chunk_size=constants.DOWNLOAD_CHUNK_SIZE):
+                if chunk:  # filter out keep-alive new chunks
+                    progress.update(len(chunk))
+                    temp_file.write(chunk)
+                    new_resume_size += len(chunk)
+                    # Some data has been downloaded from the server so we reset the number of retries.
+                    _nb_retries = 5
+        except (requests.ConnectionError, requests.ReadTimeout) as e:
+            # If a ConnectionError (SSLError) or ReadTimeout happens while streaming data from the server, it is most
+            # likely a transient error (network outage?). We log a warning message and try to resume the download a
+            # few times before giving up. The retry mechanism is basic but should be enough in most cases.
+            if _nb_retries <= 0:
+                logger.warning("Error while downloading from %s: %s\nMax retries exceeded.", url, str(e))
+                raise
+            logger.warning("Error while downloading from %s: %s\nTrying to resume download...", url, str(e))
+            time.sleep(1)
+            reset_sessions()  # In case of SSLError it's best to reset the shared requests.Session objects
+            return http_get(
+                url=url,
+                temp_file=temp_file,
+                proxies=proxies,
+                resume_size=new_resume_size,
+                headers=initial_headers,
+                expected_size=expected_size,
+                _nb_retries=_nb_retries - 1,
+                _tqdm_bar=_tqdm_bar,
+            )
+
+    if expected_size is not None and expected_size != temp_file.tell():
+        raise EnvironmentError(
+            consistency_error_message.format(
+                actual_size=temp_file.tell(),
+            )
+        )
+
+
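A minimal usage sketch of the streaming helper above, assuming network access; `http_get` is an internal helper, so the import path may change between versions:

```python
import tempfile

from huggingface_hub.file_download import http_get  # internal helper defined above

# Stream a small public file into a temporary file; retries and resume are handled internally.
with tempfile.NamedTemporaryFile(suffix=".json", delete=False) as tmp:
    http_get("https://huggingface.co/gpt2/resolve/main/config.json", tmp)
print("downloaded to", tmp.name)
```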
+def _normalize_etag(etag: Optional[str]) -> Optional[str]:
+    """Normalize ETag HTTP header, so it can be used to create nice filepaths.
+
+    The HTTP spec allows two forms of ETag:
+        ETag: W/"<etag_value>"
+        ETag: "<etag_value>"
+
+    For now, we only expect the second form from the server, but we want to be future-proof so we support both. For
+    more context, see `TestNormalizeEtag` tests and https://github.com/huggingface/huggingface_hub/pull/1428.
+
+    Args:
+        etag (`str`, *optional*): HTTP header
+
+    Returns:
+        `str` or `None`: string that can be used as a nice directory name.
+        Returns `None` if input is None.
+    """
+    if etag is None:
+        return None
+    return etag.lstrip("W/").strip('"')
+
+
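Concretely, both ETag forms normalize to the same directory-safe value (the hex values below are made up):

```python
from huggingface_hub.file_download import _normalize_etag  # private helper defined above

assert _normalize_etag('W/"0123abcd"') == "0123abcd"  # weak ETag form
assert _normalize_etag('"0123abcd"') == "0123abcd"    # strong ETag form
assert _normalize_etag(None) is None
```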
+def _create_relative_symlink(src: str, dst: str, new_blob: bool = False) -> None:
+    """Alias method used in `transformers` conversion script."""
+    return _create_symlink(src=src, dst=dst, new_blob=new_blob)
+
+
+def _create_symlink(src: str, dst: str, new_blob: bool = False) -> None:
+    """Create a symbolic link named dst pointing to src.
+
+    By default, it will try to create a symlink using a relative path. Relative paths have 2 advantages:
+    - If the cache_folder is moved (example: back-up on a shared drive), relative paths within the cache folder will
+      not break.
+    - Relative paths seem to be better handled on Windows. Issue was reported 3 times in less than a week when
+      changing from relative to absolute paths. See https://github.com/huggingface/huggingface_hub/issues/1398,
+      https://github.com/huggingface/diffusers/issues/2729 and https://github.com/huggingface/transformers/pull/22228.
+      NOTE: The issue with absolute paths doesn't happen in admin mode.
+    When creating a symlink from the cache to a local folder, it is possible that a relative path cannot be created.
+    This happens when paths are not on the same volume. In that case, we use absolute paths.
+
+
+    The result layout looks something like
+        └── [ 128]  snapshots
+            ├── [ 128]  2439f60ef33a0d46d85da5001d52aeda5b00ce9f
+            │   ├── [  52]  README.md -> ../../../blobs/d7edf6bd2a681fb0175f7735299831ee1b22b812
+            │   └── [  76]  pytorch_model.bin -> ../../../blobs/403450e234d65943a7dcf7e05a771ce3c92faa84dd07db4ac20f592037a1e4bd
+
+    If symlinks cannot be created on this platform (most likely to be Windows), the workaround is to avoid symlinks by
+    having the actual file in `dst`. If it is a new file (`new_blob=True`), we move it to `dst`. If it is not a new file
+    (`new_blob=False`), we don't know if the blob file is already referenced elsewhere. To avoid breaking existing
+    cache, the file is duplicated on the disk.
+
+    In case symlinks are not supported, a warning message is displayed to the user once when loading `huggingface_hub`.
+    The warning message can be disabled with the `HF_HUB_DISABLE_SYMLINKS_WARNING` environment variable.
+    """
+    try:
+        os.remove(dst)
+    except OSError:
+        pass
+
+    abs_src = os.path.abspath(os.path.expanduser(src))
+    abs_dst = os.path.abspath(os.path.expanduser(dst))
+    abs_dst_folder = os.path.dirname(abs_dst)
+
+    # Use relative_dst in priority
+    try:
+        relative_src = os.path.relpath(abs_src, abs_dst_folder)
+    except ValueError:
+        # Raised on Windows if src and dst are not on the same volume. This is the case when creating a symlink to a
+        # local_dir instead of within the cache directory.
+        # See https://docs.python.org/3/library/os.path.html#os.path.relpath
+        relative_src = None
+
+    try:
+        commonpath = os.path.commonpath([abs_src, abs_dst])
+        _support_symlinks = are_symlinks_supported(commonpath)
+    except ValueError:
+        # Raised if src and dst are not on the same volume. Symlinks will still work on Linux/Macos.
+        # See https://docs.python.org/3/library/os.path.html#os.path.commonpath
+        _support_symlinks = os.name != "nt"
+    except PermissionError:
+        # Permission error means src and dst are not in the same volume (e.g. destination path has been provided
+        # by the user via `local_dir`. Let's test symlink support there)
+        _support_symlinks = are_symlinks_supported(abs_dst_folder)
+    except OSError as e:
+        # OS error (errno=30) means that the commonpath is readonly on Linux/MacOS.
+        if e.errno == errno.EROFS:
+            _support_symlinks = are_symlinks_supported(abs_dst_folder)
+        else:
+            raise
+
+    # Symlinks are supported => let's create a symlink.
+    if _support_symlinks:
+        src_rel_or_abs = relative_src or abs_src
+        logger.debug(f"Creating pointer from {src_rel_or_abs} to {abs_dst}")
+        try:
+            os.symlink(src_rel_or_abs, abs_dst)
+            return
+        except FileExistsError:
+            if os.path.islink(abs_dst) and os.path.realpath(abs_dst) == os.path.realpath(abs_src):
+                # `abs_dst` already exists and is a symlink to the `abs_src` blob. It is most likely that the file has
+                # been cached twice concurrently (exactly between `os.remove` and `os.symlink`). Do nothing.
+                return
+            else:
+                # Very unlikely to happen. Means a file `dst` has been created exactly between `os.remove` and
+                # `os.symlink` and is not a symlink to the `abs_src` blob file. Raise exception.
+                raise
+        except PermissionError:
+            # Permission error means src and dst are not in the same volume (e.g. download to local dir) and symlink
+            # is supported on both volumes but not between them. Let's just make a hard copy in that case.
+            pass
+
+    # Symlinks are not supported => let's move or copy the file.
+    if new_blob:
+        logger.info(f"Symlink not supported. Moving file from {abs_src} to {abs_dst}")
+        shutil.move(abs_src, abs_dst, copy_function=_copy_no_matter_what)
+    else:
+        logger.info(f"Symlink not supported. Copying file from {abs_src} to {abs_dst}")
+        shutil.copyfile(abs_src, abs_dst)
+
+
+def _cache_commit_hash_for_specific_revision(storage_folder: str, revision: str, commit_hash: str) -> None:
+    """Cache reference between a revision (tag, branch or truncated commit hash) and the corresponding commit hash.
+
+    Does nothing if `revision` is already a proper `commit_hash` or the reference is already cached.
+    """
+    if revision != commit_hash:
+        ref_path = Path(storage_folder) / "refs" / revision
+        ref_path.parent.mkdir(parents=True, exist_ok=True)
+        if not ref_path.exists() or commit_hash != ref_path.read_text():
+            # Update ref only if it has been updated. Could cause useless error in case
+            # repo is already cached and user doesn't have write access to cache folder.
+            # See https://github.com/huggingface/huggingface_hub/issues/1216.
+            ref_path.write_text(commit_hash)
+
+
+@validate_hf_hub_args
+def repo_folder_name(*, repo_id: str, repo_type: str) -> str:
+    """Return a serialized version of a hf.co repo name and type, safe for disk storage
+    as a single non-nested folder.
+
+    Example: models--julien-c--EsperBERTo-small
+    """
+    # remove all `/` occurrences to correctly convert repo to directory name
+    parts = [f"{repo_type}s", *repo_id.split("/")]
+    return constants.REPO_ID_SEPARATOR.join(parts)
+
+
+def _check_disk_space(expected_size: int, target_dir: Union[str, Path]) -> None:
+    """Check disk usage and log a warning if there is not enough disk space to download the file.
+
+    Args:
+        expected_size (`int`):
+            The expected size of the file in bytes.
+        target_dir (`str`):
+            The directory where the file will be stored after downloading.
+    """
+
+    target_dir = Path(target_dir)  # format as `Path`
+    for path in [target_dir] + list(target_dir.parents):  # first check target_dir, then each parent one by one
+        try:
+            target_dir_free = shutil.disk_usage(path).free
+            if target_dir_free < expected_size:
+                warnings.warn(
+                    "Not enough free disk space to download the file. "
+                    f"The expected file size is: {expected_size / 1e6:.2f} MB. "
+                    f"The target location {target_dir} only has {target_dir_free / 1e6:.2f} MB free disk space."
+                )
+            return
+        except OSError:  # ignore error: path does not exist or disk space cannot be checked there
+            pass
+
+
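A quick sketch of the pre-download check above; with a deliberately huge `expected_size` the warning fires (the size below is illustrative, and `_check_disk_space` is a private helper):

```python
import warnings

from huggingface_hub.file_download import _check_disk_space  # private helper defined above

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    _check_disk_space(expected_size=10**18, target_dir=".")  # ~1 exabyte: should warn
print(len(caught), "warning(s) emitted")
```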
+@validate_hf_hub_args
+def hf_hub_download(
+    repo_id: str,
+    filename: str,
+    *,
+    subfolder: Optional[str] = None,
+    repo_type: Optional[str] = None,
+    revision: Optional[str] = None,
+    library_name: Optional[str] = None,
+    library_version: Optional[str] = None,
+    cache_dir: Union[str, Path, None] = None,
+    local_dir: Union[str, Path, None] = None,
+    user_agent: Union[Dict, str, None] = None,
+    force_download: bool = False,
+    proxies: Optional[Dict] = None,
+    etag_timeout: float = constants.DEFAULT_ETAG_TIMEOUT,
+    token: Union[bool, str, None] = None,
+    local_files_only: bool = False,
+    headers: Optional[Dict[str, str]] = None,
+    endpoint: Optional[str] = None,
+    resume_download: Optional[bool] = None,
+    force_filename: Optional[str] = None,
+    local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto",
+) -> str:
+    """Download a given file if it's not already present in the local cache.
+
+    The new cache file layout looks like this:
+    - The cache directory contains one subfolder per repo_id (namespaced by repo type)
+    - inside each repo folder:
+        - refs is a list of the latest known revision => commit_hash pairs
+        - blobs contains the actual file blobs (identified by their git-sha or sha256, depending on
+          whether they're LFS files or not)
+        - snapshots contains one subfolder per commit, each "commit" contains the subset of the files
+          that have been resolved at that particular commit. Each filename is a symlink to the blob
+          at that particular commit.
+
+    ```
+    [  96]  .
+    └── [ 160]  models--julien-c--EsperBERTo-small
+        ├── [ 160]  blobs
+        │   ├── [321M]  403450e234d65943a7dcf7e05a771ce3c92faa84dd07db4ac20f592037a1e4bd
+        │   ├── [ 398]  7cb18dc9bafbfcf74629a4b760af1b160957a83e
+        │   └── [1.4K]  d7edf6bd2a681fb0175f7735299831ee1b22b812
+        ├── [  96]  refs
+        │   └── [  40]  main
+        └── [ 128]  snapshots
+            ├── [ 128]  2439f60ef33a0d46d85da5001d52aeda5b00ce9f
+            │   ├── [  52]  README.md -> ../../blobs/d7edf6bd2a681fb0175f7735299831ee1b22b812
+            │   └── [  76]  pytorch_model.bin -> ../../blobs/403450e234d65943a7dcf7e05a771ce3c92faa84dd07db4ac20f592037a1e4bd
+            └── [ 128]  bbc77c8132af1cc5cf678da3f1ddf2de43606d48
+                ├── [  52]  README.md -> ../../blobs/7cb18dc9bafbfcf74629a4b760af1b160957a83e
+                └── [  76]  pytorch_model.bin -> ../../blobs/403450e234d65943a7dcf7e05a771ce3c92faa84dd07db4ac20f592037a1e4bd
+    ```
+
+    If `local_dir` is provided, the file structure from the repo will be replicated in this location. When using this
+    option, the `cache_dir` will not be used and a `.cache/huggingface/` folder will be created at the root of `local_dir`
+    to store some metadata related to the downloaded files. While this mechanism is not as robust as the main
+    cache-system, it's optimized for regularly pulling the latest version of a repository.
+
+    Args:
+        repo_id (`str`):
+            A user or an organization name and a repo name separated by a `/`.
+        filename (`str`):
+            The name of the file in the repo.
+        subfolder (`str`, *optional*):
+            An optional value corresponding to a folder inside the model repo.
+        repo_type (`str`, *optional*):
+            Set to `"dataset"` or `"space"` if downloading from a dataset or space,
+            `None` or `"model"` if downloading from a model. Default is `None`.
+        revision (`str`, *optional*):
+            An optional Git revision id which can be a branch name, a tag, or a
+            commit hash.
+        library_name (`str`, *optional*):
+            The name of the library to which the object corresponds.
+        library_version (`str`, *optional*):
+            The version of the library.
+        cache_dir (`str`, `Path`, *optional*):
+            Path to the folder where cached files are stored.
+        local_dir (`str` or `Path`, *optional*):
+            If provided, the downloaded file will be placed under this directory.
+        user_agent (`dict`, `str`, *optional*):
+            The user-agent info in the form of a dictionary or a string.
+        force_download (`bool`, *optional*, defaults to `False`):
+            Whether the file should be downloaded even if it already exists in
+            the local cache.
+        proxies (`dict`, *optional*):
+            Dictionary mapping protocol to the URL of the proxy passed to
+            `requests.request`.
+        etag_timeout (`float`, *optional*, defaults to `10`):
+            When fetching ETag, how many seconds to wait for the server to send
+            data before giving up, which is passed to `requests.request`.
+        token (`str`, `bool`, *optional*):
+            A token to be used for the download.
+                - If `True`, the token is read from the HuggingFace config
+                  folder.
+                - If a string, it's used as the authentication token.
+        local_files_only (`bool`, *optional*, defaults to `False`):
+            If `True`, avoid downloading the file and return the path to the
+            local cached file if it exists.
+        headers (`dict`, *optional*):
+            Additional headers to be sent with the request.
+
+    Returns:
+        `str`: Local path of file or, if networking is off, last version of file cached on disk.
+
+    Raises:
+        [`~utils.RepositoryNotFoundError`]
+            If the repository to download from cannot be found. This may be because it doesn't exist,
+            or because it is set to `private` and you do not have access.
+        [`~utils.RevisionNotFoundError`]
+            If the revision to download from cannot be found.
+        [`~utils.EntryNotFoundError`]
+            If the file to download cannot be found.
+        [`~utils.LocalEntryNotFoundError`]
+            If network is disabled or unavailable and file is not found in cache.
+        [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
+            If `token=True` but the token cannot be found.
+        [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError)
+            If ETag cannot be determined.
+        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
+            If some parameter value is invalid.
+    """
+    if constants.HF_HUB_ETAG_TIMEOUT != constants.DEFAULT_ETAG_TIMEOUT:
+        # Respect environment variable above user value
+        etag_timeout = constants.HF_HUB_ETAG_TIMEOUT
+
+    if force_filename is not None:
+        warnings.warn(
+            "The `force_filename` parameter is deprecated as a new caching system, "
+            "which keeps the filenames as they are on the Hub, is now in place.",
+            FutureWarning,
+        )
+    if resume_download is not None:
+        warnings.warn(
+            "`resume_download` is deprecated and will be removed in version 1.0.0. "
+            "Downloads always resume when possible. "
+            "If you want to force a new download, use `force_download=True`.",
+            FutureWarning,
+        )
+
+    if cache_dir is None:
+        cache_dir = constants.HF_HUB_CACHE
+    if revision is None:
+        revision = constants.DEFAULT_REVISION
+    if isinstance(cache_dir, Path):
+        cache_dir = str(cache_dir)
+    if isinstance(local_dir, Path):
+        local_dir = str(local_dir)
+
+    if subfolder == "":
+        subfolder = None
+    if subfolder is not None:
+        # This is used to create a URL, and not a local path, hence the forward slash.
+        filename = f"{subfolder}/{filename}"
+
+    if repo_type is None:
+        repo_type = "model"
+    if repo_type not in constants.REPO_TYPES:
+        raise ValueError(f"Invalid repo type: {repo_type}. Accepted repo types are: {str(constants.REPO_TYPES)}")
+
+    hf_headers = build_hf_headers(
+        token=token,
+        library_name=library_name,
+        library_version=library_version,
+        user_agent=user_agent,
+        headers=headers,
+    )
+
+    if local_dir is not None:
+        if local_dir_use_symlinks != "auto":
+            warnings.warn(
+                "`local_dir_use_symlinks` parameter is deprecated and will be ignored. "
+                "The process to download files to a local folder has been updated and does "
+                "not rely on symlinks anymore. You only need to pass a destination folder "
+                "as `local_dir`.\n"
+                "For more details, check out https://huggingface.co/docs/huggingface_hub/main/en/guides/download#download-files-to-local-folder."
+            )
+
+        return _hf_hub_download_to_local_dir(
+            # Destination
+            local_dir=local_dir,
+            # File info
+            repo_id=repo_id,
+            repo_type=repo_type,
+            filename=filename,
+            revision=revision,
+            # HTTP info
+            endpoint=endpoint,
+            etag_timeout=etag_timeout,
+            headers=hf_headers,
+            proxies=proxies,
+            token=token,
+            # Additional options
+            cache_dir=cache_dir,
+            force_download=force_download,
+            local_files_only=local_files_only,
+        )
+    else:
+        return _hf_hub_download_to_cache_dir(
+            # Destination
+            cache_dir=cache_dir,
+            # File info
+            repo_id=repo_id,
+            filename=filename,
+            repo_type=repo_type,
+            revision=revision,
+            # HTTP info
+            endpoint=endpoint,
+            etag_timeout=etag_timeout,
+            headers=hf_headers,
+            proxies=proxies,
+            token=token,
+            # Additional options
+            local_files_only=local_files_only,
+            force_download=force_download,
+        )
+
+
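A minimal end-to-end sketch of the public entry point above, assuming network access (`gpt2`/`config.json` are real public examples; the local folder name is arbitrary):

```python
from huggingface_hub import hf_hub_download

# Download (or reuse from cache) one file; returns the resolved local path.
path = hf_hub_download(repo_id="gpt2", filename="config.json")
print(path)

# Same file into a plain folder instead of the cache (uses the local-dir code path).
path = hf_hub_download(repo_id="gpt2", filename="config.json", local_dir="./gpt2-files")
print(path)
```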
+def _hf_hub_download_to_cache_dir(
+    *,
+    # Destination
+    cache_dir: str,
+    # File info
+    repo_id: str,
+    filename: str,
+    repo_type: str,
+    revision: str,
+    # HTTP info
+    endpoint: Optional[str],
+    etag_timeout: float,
+    headers: Dict[str, str],
+    proxies: Optional[Dict],
+    token: Optional[Union[bool, str]],
+    # Additional options
+    local_files_only: bool,
+    force_download: bool,
+) -> str:
+    """Download a given file to a cache folder, if not already present.
+
+    Method should not be called directly. Please use `hf_hub_download` instead.
+    """
+    locks_dir = os.path.join(cache_dir, ".locks")
+    storage_folder = os.path.join(cache_dir, repo_folder_name(repo_id=repo_id, repo_type=repo_type))
+
+    # cross platform transcription of filename, to be used as a local file path.
+    relative_filename = os.path.join(*filename.split("/"))
+    if os.name == "nt":
+        if relative_filename.startswith("..\\") or "\\..\\" in relative_filename:
+            raise ValueError(
+                f"Invalid filename: cannot handle filename '{relative_filename}' on Windows. Please ask the repository"
+                " owner to rename this file."
+            )
+
+    # if user provides a commit_hash and they already have the file on disk, shortcut everything.
+    if REGEX_COMMIT_HASH.match(revision):
+        pointer_path = _get_pointer_path(storage_folder, revision, relative_filename)
+        if os.path.exists(pointer_path) and not force_download:
+            return pointer_path
+
+    # Try to get metadata (etag, commit_hash, url, size) from the server.
+    # If we can't, a HEAD request error is returned.
+    (url_to_download, etag, commit_hash, expected_size, head_call_error) = _get_metadata_or_catch_error(
+        repo_id=repo_id,
+        filename=filename,
+        repo_type=repo_type,
+        revision=revision,
+        endpoint=endpoint,
+        proxies=proxies,
+        etag_timeout=etag_timeout,
+        headers=headers,
+        token=token,
+        local_files_only=local_files_only,
+        storage_folder=storage_folder,
+        relative_filename=relative_filename,
+    )
+
+    # etag can be None for several reasons:
+    # 1. we passed local_files_only.
+    # 2. we don't have a connection
+    # 3. Hub is down (HTTP 500, 503, 504)
+    # 4. repo is not found (for example, private or gated) and an invalid or missing token was sent
+    # 5. Hub is blocked by a firewall or proxy is not set correctly.
+    # => Try to get the last downloaded one from the specified revision.
+    #
+    # If the specified revision is a commit hash, look inside "snapshots".
+    # If the specified revision is a branch or tag, look inside "refs".
+    if head_call_error is not None:
+        # Couldn't make a HEAD call => let's try to find a local file
+        if not force_download:
+            commit_hash = None
+            if REGEX_COMMIT_HASH.match(revision):
+                commit_hash = revision
+            else:
+                ref_path = os.path.join(storage_folder, "refs", revision)
+                if os.path.isfile(ref_path):
+                    with open(ref_path) as f:
+                        commit_hash = f.read()
+
+            # Return pointer file if exists
+            if commit_hash is not None:
+                pointer_path = _get_pointer_path(storage_folder, commit_hash, relative_filename)
+                if os.path.exists(pointer_path) and not force_download:
+                    return pointer_path
+
+        # Otherwise, raise appropriate error
+        _raise_on_head_call_error(head_call_error, force_download, local_files_only)
+
+    # From now on, etag, commit_hash, url and size are not None.
+    assert etag is not None, "etag must have been retrieved from server"
+    assert commit_hash is not None, "commit_hash must have been retrieved from server"
+    assert url_to_download is not None, "file location must have been retrieved from server"
+    assert expected_size is not None, "expected_size must have been retrieved from server"
+    blob_path = os.path.join(storage_folder, "blobs", etag)
+    pointer_path = _get_pointer_path(storage_folder, commit_hash, relative_filename)
+
+    os.makedirs(os.path.dirname(blob_path), exist_ok=True)
+    os.makedirs(os.path.dirname(pointer_path), exist_ok=True)
+
+    # if passed revision is not identical to commit_hash
+    # then revision has to be a branch name or tag name.
+    # In that case store a ref.
+    _cache_commit_hash_for_specific_revision(storage_folder, revision, commit_hash)
+
+    # If file already exists, return it (except if force_download=True)
+    if not force_download:
+        if os.path.exists(pointer_path):
+            return pointer_path
+
+        if os.path.exists(blob_path):
+            # we have the blob already, but not the pointer
+            _create_symlink(blob_path, pointer_path, new_blob=False)
+            return pointer_path
+
+    # Prevent parallel downloads of the same file with a lock.
+    # etag could be duplicated across repos, so the lock folder is namespaced by repo.
+    lock_path = os.path.join(locks_dir, repo_folder_name(repo_id=repo_id, repo_type=repo_type), f"{etag}.lock")
+
+    # Some Windows versions do not allow for paths longer than 255 characters.
+    # In this case, we must specify it as an extended path by using the "\\?\" prefix.
+    if os.name == "nt" and len(os.path.abspath(lock_path)) > 255:
+        lock_path = "\\\\?\\" + os.path.abspath(lock_path)
+
+    if os.name == "nt" and len(os.path.abspath(blob_path)) > 255:
+        blob_path = "\\\\?\\" + os.path.abspath(blob_path)
+
+    Path(lock_path).parent.mkdir(parents=True, exist_ok=True)
+    with WeakFileLock(lock_path):
+        _download_to_tmp_and_move(
+            incomplete_path=Path(blob_path + ".incomplete"),
+            destination_path=Path(blob_path),
+            url_to_download=url_to_download,
+            proxies=proxies,
+            headers=headers,
+            expected_size=expected_size,
+            filename=filename,
+            force_download=force_download,
+        )
+        if not os.path.exists(pointer_path):
+            _create_symlink(blob_path, pointer_path, new_blob=True)
+
+    return pointer_path
+
+
| 1027 |
+
def _hf_hub_download_to_local_dir(
    *,
    # Destination
    local_dir: Union[str, Path],
    # File info
    repo_id: str,
    repo_type: str,
    filename: str,
    revision: str,
    # HTTP info
    endpoint: Optional[str],
    etag_timeout: float,
    headers: Dict[str, str],
    proxies: Optional[Dict],
    token: Union[bool, str, None],
    # Additional options
    cache_dir: str,
    force_download: bool,
    local_files_only: bool,
) -> str:
    """Download a given file to a local folder, if not already present.

    Method should not be called directly. Please use `hf_hub_download` instead.
    """
    # Some Windows versions do not allow for paths longer than 255 characters.
    # In this case, we must specify it as an extended path by using the "\\?\" prefix.
    if os.name == "nt" and len(os.path.abspath(local_dir)) > 255:
        local_dir = "\\\\?\\" + os.path.abspath(local_dir)
    local_dir = Path(local_dir)
    paths = get_local_download_paths(local_dir=local_dir, filename=filename)
    local_metadata = read_download_metadata(local_dir=local_dir, filename=filename)

    # Local file exists + metadata exists + commit_hash matches => return file
    if (
        not force_download
        and REGEX_COMMIT_HASH.match(revision)
        and paths.file_path.is_file()
        and local_metadata is not None
        and local_metadata.commit_hash == revision
    ):
        return str(paths.file_path)

    # Local file doesn't exist or commit_hash doesn't match => we need the etag
    (url_to_download, etag, commit_hash, expected_size, head_call_error) = _get_metadata_or_catch_error(
        repo_id=repo_id,
        filename=filename,
        repo_type=repo_type,
        revision=revision,
        endpoint=endpoint,
        proxies=proxies,
        etag_timeout=etag_timeout,
        headers=headers,
        token=token,
        local_files_only=local_files_only,
    )

    if head_call_error is not None:
        # No HEAD call but local file exists => default to local file
        if not force_download and paths.file_path.is_file():
            logger.warning(
                f"Couldn't access the Hub to check for update but local file already exists. Defaulting to existing file. (error: {head_call_error})"
            )
            return str(paths.file_path)
        # Otherwise => raise
        _raise_on_head_call_error(head_call_error, force_download, local_files_only)

    # From now on, etag, commit_hash, url and size are not None.
    assert etag is not None, "etag must have been retrieved from server"
    assert commit_hash is not None, "commit_hash must have been retrieved from server"
    assert url_to_download is not None, "file location must have been retrieved from server"
    assert expected_size is not None, "expected_size must have been retrieved from server"

    # Local file exists => check if it's up-to-date
    if not force_download and paths.file_path.is_file():
        # etag matches => update metadata and return file
        if local_metadata is not None and local_metadata.etag == etag:
            write_download_metadata(local_dir=local_dir, filename=filename, commit_hash=commit_hash, etag=etag)
            return str(paths.file_path)

        # metadata is outdated + etag is a sha256
        # => means it's an LFS file (large)
        # => let's compute local hash and compare
        # => if match, update metadata and return file
        if local_metadata is None and REGEX_SHA256.match(etag) is not None:
            with open(paths.file_path, "rb") as f:
                file_hash = sha_fileobj(f).hex()
            if file_hash == etag:
                write_download_metadata(local_dir=local_dir, filename=filename, commit_hash=commit_hash, etag=etag)
                return str(paths.file_path)

    # Local file doesn't exist or etag isn't a match => retrieve file from remote (or cache)

    # If we are lucky enough, the file is already in the cache => copy it
    if not force_download:
        cached_path = try_to_load_from_cache(
            repo_id=repo_id,
            filename=filename,
            cache_dir=cache_dir,
            revision=commit_hash,
            repo_type=repo_type,
        )
        if isinstance(cached_path, str):
            with WeakFileLock(paths.lock_path):
                paths.file_path.parent.mkdir(parents=True, exist_ok=True)
                shutil.copyfile(cached_path, paths.file_path)
            write_download_metadata(local_dir=local_dir, filename=filename, commit_hash=commit_hash, etag=etag)
            return str(paths.file_path)

    # Otherwise, let's download the file!
    with WeakFileLock(paths.lock_path):
        paths.file_path.unlink(missing_ok=True)  # delete outdated file first
        _download_to_tmp_and_move(
            incomplete_path=paths.incomplete_path(etag),
            destination_path=paths.file_path,
            url_to_download=url_to_download,
            proxies=proxies,
            headers=headers,
            expected_size=expected_size,
            filename=filename,
            force_download=force_download,
        )

    write_download_metadata(local_dir=local_dir, filename=filename, commit_hash=commit_hash, etag=etag)
    return str(paths.file_path)

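For orientation, a minimal usage sketch of the public entry point that routes to this helper when `local_dir` is passed; the repo id and filename below are placeholders, not values taken from this diff.

```python
from huggingface_hub import hf_hub_download

# Downloads into ./my-model/ and records commit_hash/etag metadata under
# ./my-model/.cache/huggingface/ so later calls can return early when the
# local copy is up to date (placeholder repo id and filename).
path = hf_hub_download(
    repo_id="user/my-model",
    filename="config.json",
    local_dir="./my-model",
)
print(path)
```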
@validate_hf_hub_args
def try_to_load_from_cache(
    repo_id: str,
    filename: str,
    cache_dir: Union[str, Path, None] = None,
    revision: Optional[str] = None,
    repo_type: Optional[str] = None,
) -> Union[str, _CACHED_NO_EXIST_T, None]:
    """
    Explores the cache to return the latest cached file for a given revision if found.

    This function will not raise any exception if the file is not cached.

    Args:
        cache_dir (`str` or `os.PathLike`):
            The folder where the cached files lie.
        repo_id (`str`):
            The ID of the repo on huggingface.co.
        filename (`str`):
            The filename to look for inside `repo_id`.
        revision (`str`, *optional*):
            The specific model version to use. Will default to `"main"` if it's not provided and no `commit_hash` is
            provided either.
        repo_type (`str`, *optional*):
            The type of the repository. Will default to `"model"`.

    Returns:
        `Optional[str]` or `_CACHED_NO_EXIST`:
            Will return `None` if the file was not cached. Otherwise:
            - The exact path to the cached file if it's found in the cache
            - A special value `_CACHED_NO_EXIST` if the file does not exist at the given commit hash and this fact was
              cached.

    Example:

    ```python
    from huggingface_hub import try_to_load_from_cache, _CACHED_NO_EXIST

    filepath = try_to_load_from_cache()
    if isinstance(filepath, str):
        # file exists and is cached
        ...
    elif filepath is _CACHED_NO_EXIST:
        # non-existence of file is cached
        ...
    else:
        # file is not cached
        ...
    ```
    """
    if revision is None:
        revision = "main"
    if repo_type is None:
        repo_type = "model"
    if repo_type not in constants.REPO_TYPES:
        raise ValueError(f"Invalid repo type: {repo_type}. Accepted repo types are: {str(constants.REPO_TYPES)}")
    if cache_dir is None:
        cache_dir = constants.HF_HUB_CACHE

    object_id = repo_id.replace("/", "--")
    repo_cache = os.path.join(cache_dir, f"{repo_type}s--{object_id}")
    if not os.path.isdir(repo_cache):
        # No cache for this model
        return None

    refs_dir = os.path.join(repo_cache, "refs")
    snapshots_dir = os.path.join(repo_cache, "snapshots")
    no_exist_dir = os.path.join(repo_cache, ".no_exist")

    # Resolve refs (for instance to convert main to the associated commit sha)
    if os.path.isdir(refs_dir):
        revision_file = os.path.join(refs_dir, revision)
        if os.path.isfile(revision_file):
            with open(revision_file) as f:
                revision = f.read()

    # Check if file is cached as "no_exist"
    if os.path.isfile(os.path.join(no_exist_dir, revision, filename)):
        return _CACHED_NO_EXIST

    # Check if revision folder exists
    if not os.path.exists(snapshots_dir):
        return None
    cached_shas = os.listdir(snapshots_dir)
    if revision not in cached_shas:
        # No cache for this revision and we won't try to return a random revision
        return None

    # Check if file exists in cache
    cached_file = os.path.join(snapshots_dir, revision, filename)
    return cached_file if os.path.isfile(cached_file) else None

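A minimal sketch of the lookup above, assuming a hypothetical repo that was downloaded earlier; it mirrors the cache layout the function walks (`refs/` to resolve a branch name into a commit sha, then `snapshots/<sha>/`).

```python
from huggingface_hub import _CACHED_NO_EXIST, try_to_load_from_cache

# Layout walked by the function (cache_dir defaults to HF_HUB_CACHE):
#   <cache_dir>/models--user--my-model/refs/main              -> commit sha
#   <cache_dir>/models--user--my-model/snapshots/<sha>/config.json
filepath = try_to_load_from_cache(repo_id="user/my-model", filename="config.json")
if isinstance(filepath, str):
    print("cached at", filepath)
elif filepath is _CACHED_NO_EXIST:
    print("non-existence of the file is cached")
else:
    print("not cached")
```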
@validate_hf_hub_args
def get_hf_file_metadata(
    url: str,
    token: Union[bool, str, None] = None,
    proxies: Optional[Dict] = None,
    timeout: Optional[float] = constants.DEFAULT_REQUEST_TIMEOUT,
    library_name: Optional[str] = None,
    library_version: Optional[str] = None,
    user_agent: Union[Dict, str, None] = None,
    headers: Optional[Dict[str, str]] = None,
) -> HfFileMetadata:
    """Fetch metadata of a file versioned on the Hub for a given url.

    Args:
        url (`str`):
            File url, for example returned by [`hf_hub_url`].
        token (`str` or `bool`, *optional*):
            A token to be used for the download.
                - If `True`, the token is read from the HuggingFace config
                  folder.
                - If `False` or `None`, no token is provided.
                - If a string, it's used as the authentication token.
        proxies (`dict`, *optional*):
            Dictionary mapping protocol to the URL of the proxy passed to
            `requests.request`.
        timeout (`float`, *optional*, defaults to 10):
            How many seconds to wait for the server to send metadata before giving up.
        library_name (`str`, *optional*):
            The name of the library to which the object corresponds.
        library_version (`str`, *optional*):
            The version of the library.
        user_agent (`dict`, `str`, *optional*):
            The user-agent info in the form of a dictionary or a string.
        headers (`dict`, *optional*):
            Additional headers to be sent with the request.

    Returns:
        A [`HfFileMetadata`] object containing metadata such as location, etag, size and
        commit_hash.
    """
    hf_headers = build_hf_headers(
        token=token,
        library_name=library_name,
        library_version=library_version,
        user_agent=user_agent,
        headers=headers,
    )
    hf_headers["Accept-Encoding"] = "identity"  # prevent any compression => we want to know the real size of the file

    # Retrieve metadata
    r = _request_wrapper(
        method="HEAD",
        url=url,
        headers=hf_headers,
        allow_redirects=False,
        follow_relative_redirects=True,
        proxies=proxies,
        timeout=timeout,
    )
    hf_raise_for_status(r)

    # Return
    return HfFileMetadata(
        commit_hash=r.headers.get(constants.HUGGINGFACE_HEADER_X_REPO_COMMIT),
        # We favor a custom header indicating the etag of the linked resource, and
        # we fallback to the regular etag header.
        etag=_normalize_etag(r.headers.get(constants.HUGGINGFACE_HEADER_X_LINKED_ETAG) or r.headers.get("ETag")),
        # Either from response headers (if redirected) or defaults to request url
        # Do not use directly `url`, as `_request_wrapper` might have followed relative
        # redirects.
        location=r.headers.get("Location") or r.request.url,  # type: ignore
        size=_int_or_none(
            r.headers.get(constants.HUGGINGFACE_HEADER_X_LINKED_SIZE) or r.headers.get("Content-Length")
        ),
    )

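A short usage sketch of the HEAD call above; the repo id is a placeholder and the call requires network access.

```python
from huggingface_hub import get_hf_file_metadata, hf_hub_url

url = hf_hub_url(repo_id="user/my-model", filename="config.json")  # placeholder repo
metadata = get_hf_file_metadata(url)
# `location` may differ from `url` when the file is an LFS blob served from a CDN.
print(metadata.commit_hash, metadata.etag, metadata.size, metadata.location)
```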
def _get_metadata_or_catch_error(
    *,
    repo_id: str,
    filename: str,
    repo_type: str,
    revision: str,
    endpoint: Optional[str],
    proxies: Optional[Dict],
    etag_timeout: Optional[float],
    headers: Dict[str, str],  # mutated inplace!
    token: Union[bool, str, None],
    local_files_only: bool,
    relative_filename: Optional[str] = None,  # only used to store `.no_exists` in cache
    storage_folder: Optional[str] = None,  # only used to store `.no_exists` in cache
) -> Union[
    # Either an exception is caught and returned
    Tuple[None, None, None, None, Exception],
    # Or the metadata is returned as
    # `(url_to_download, etag, commit_hash, expected_size, None)`
    Tuple[str, str, str, int, None],
]:
    """Get metadata for a file on the Hub, safely handling network issues.

    Returns either the etag, commit_hash and expected size of the file, or the error
    raised while fetching the metadata.

    NOTE: This function mutates `headers` inplace! It removes the `authorization` header
          if the file is a LFS blob and the domain of the url is different from the
          domain of the location (typically an S3 bucket).
    """
    if local_files_only:
        return (
            None,
            None,
            None,
            None,
            OfflineModeIsEnabled(
                f"Cannot access file since 'local_files_only=True' has been set. (repo_id: {repo_id}, repo_type: {repo_type}, revision: {revision}, filename: {filename})"
            ),
        )

    url = hf_hub_url(repo_id, filename, repo_type=repo_type, revision=revision, endpoint=endpoint)
    url_to_download: str = url
    etag: Optional[str] = None
    commit_hash: Optional[str] = None
    expected_size: Optional[int] = None
    head_error_call: Optional[Exception] = None

    # Try to get metadata from the server.
    # Do not raise yet if the file is not found or not accessible.
    if not local_files_only:
        try:
            try:
                metadata = get_hf_file_metadata(
                    url=url, proxies=proxies, timeout=etag_timeout, headers=headers, token=token
                )
            except EntryNotFoundError as http_error:
                if storage_folder is not None and relative_filename is not None:
                    # Cache the non-existence of the file
                    commit_hash = http_error.response.headers.get(constants.HUGGINGFACE_HEADER_X_REPO_COMMIT)
                    if commit_hash is not None:
                        no_exist_file_path = Path(storage_folder) / ".no_exist" / commit_hash / relative_filename
                        try:
                            no_exist_file_path.parent.mkdir(parents=True, exist_ok=True)
                            no_exist_file_path.touch()
                        except OSError as e:
                            logger.error(
                                f"Could not cache non-existence of file. Will ignore error and continue. Error: {e}"
                            )
                        _cache_commit_hash_for_specific_revision(storage_folder, revision, commit_hash)
                raise

            # Commit hash must exist
            commit_hash = metadata.commit_hash
            if commit_hash is None:
                raise FileMetadataError(
                    "Distant resource does not seem to be on huggingface.co. It is possible that a configuration issue"
                    " prevents you from downloading resources from https://huggingface.co. Please check your firewall"
                    " and proxy settings and make sure your SSL certificates are updated."
                )

            # Etag must exist
            # If we don't have any of those, raise an error.
            etag = metadata.etag
            if etag is None:
                raise FileMetadataError(
                    "Distant resource does not have an ETag, we won't be able to reliably ensure reproducibility."
                )

            # Size must exist
            expected_size = metadata.size
            if expected_size is None:
                raise FileMetadataError("Distant resource does not have a Content-Length.")

            # In case of a redirect, save an extra redirect on the request.get call,
            # and ensure we download the exact atomic version even if it changed
            # between the HEAD and the GET (unlikely, but hey).
            #
            # If url domain is different => we are downloading from a CDN => url is signed => don't send auth
            # If url domain is the same => redirect due to repo rename AND downloading a regular file => keep auth
            if url != metadata.location:
                url_to_download = metadata.location
                if urlparse(url).netloc != urlparse(metadata.location).netloc:
                    # Remove authorization header when downloading a LFS blob
                    headers.pop("authorization", None)
        except (requests.exceptions.SSLError, requests.exceptions.ProxyError):
            # Actually raise for those subclasses of ConnectionError
            raise
        except (
            requests.exceptions.ConnectionError,
            requests.exceptions.Timeout,
            OfflineModeIsEnabled,
        ) as error:
            # Otherwise, our Internet connection is down.
            # etag is None
            head_error_call = error
        except (RevisionNotFoundError, EntryNotFoundError):
            # The repo was found but the revision or entry doesn't exist on the Hub (never existed or got deleted)
            raise
        except requests.HTTPError as error:
            # Multiple reasons for an http error:
            # - Repository is private and invalid/missing token sent
            # - Repository is gated and invalid/missing token sent
            # - Hub is down (error 500 or 504)
            # => let's switch to 'local_files_only=True' to check if the files are already cached.
            #    (if it's not the case, the error will be re-raised)
            head_error_call = error
        except FileMetadataError as error:
            # Multiple reasons for a FileMetadataError:
            # - Wrong network configuration (proxy, firewall, SSL certificates)
            # - Inconsistency on the Hub
            # => let's switch to 'local_files_only=True' to check if the files are already cached.
            #    (if it's not the case, the error will be re-raised)
            head_error_call = error

    if not (local_files_only or etag is not None or head_error_call is not None):
        raise RuntimeError("etag is empty due to uncovered problems")

    return (url_to_download, etag, commit_hash, expected_size, head_error_call)  # type: ignore [return-value]

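An illustrative sketch of the caller-side contract (this is a private helper, so the call only makes sense inside this module): exactly one of the metadata fields or `head_call_error` is populated, as `_hf_hub_download_to_local_dir` above demonstrates. All argument values below are placeholders.

```python
# Illustrative values only; not part of the original file.
(url_to_download, etag, commit_hash, expected_size, head_call_error) = _get_metadata_or_catch_error(
    repo_id="user/my-model",
    filename="config.json",
    repo_type="model",
    revision="main",
    endpoint=None,
    proxies=None,
    etag_timeout=10.0,
    headers={},
    token=None,
    local_files_only=False,
)
if head_call_error is not None:
    # Either fall back to a local file or re-raise (see helper below).
    _raise_on_head_call_error(head_call_error, force_download=False, local_files_only=False)
```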
def _raise_on_head_call_error(head_call_error: Exception, force_download: bool, local_files_only: bool) -> NoReturn:
    """Raise an appropriate error when the HEAD call failed and we cannot locate a local file."""
    # No head call => we cannot force download.
    if force_download:
        if local_files_only:
            raise ValueError("Cannot pass 'force_download=True' and 'local_files_only=True' at the same time.")
        elif isinstance(head_call_error, OfflineModeIsEnabled):
            raise ValueError("Cannot pass 'force_download=True' when offline mode is enabled.") from head_call_error
        else:
            raise ValueError("Force download failed due to the above error.") from head_call_error

    # No head call + couldn't find an appropriate file on disk => raise an error.
    if local_files_only:
        raise LocalEntryNotFoundError(
            "Cannot find the requested files in the disk cache and outgoing traffic has been disabled. To enable"
            " hf.co look-ups and downloads online, set 'local_files_only' to False."
        )
    elif isinstance(head_call_error, (RepositoryNotFoundError, GatedRepoError)) or (
        isinstance(head_call_error, HfHubHTTPError) and head_call_error.response.status_code == 401
    ):
        # Repo not found or gated => let's raise the actual error
        # Unauthorized => likely a token issue => let's raise the actual error
        raise head_call_error
    else:
        # Otherwise: most likely a connection issue or Hub downtime => let's warn the user
        raise LocalEntryNotFoundError(
            "An error happened while trying to locate the file on the Hub and we cannot find the requested files"
            " in the local cache. Please check your connection and try again or make sure your Internet connection"
            " is on."
        ) from head_call_error

def _download_to_tmp_and_move(
    incomplete_path: Path,
    destination_path: Path,
    url_to_download: str,
    proxies: Optional[Dict],
    headers: Dict[str, str],
    expected_size: Optional[int],
    filename: str,
    force_download: bool,
) -> None:
    """Download content from a URL to a destination path.

    Internal logic:
    - return early if file is already downloaded
    - resume download if possible (from incomplete file)
    - do not resume download if `force_download=True` or `HF_HUB_ENABLE_HF_TRANSFER=True`
    - check disk space before downloading
    - download content to a temporary file
    - set correct permissions on temporary file
    - move the temporary file to the destination path

    Both `incomplete_path` and `destination_path` must be on the same volume to avoid a local copy.
    """
    if destination_path.exists() and not force_download:
        # Do nothing if already exists (except if force_download=True)
        return

    if incomplete_path.exists() and (force_download or (constants.HF_HUB_ENABLE_HF_TRANSFER and not proxies)):
        # By default, we will try to resume the download if possible.
        # However, if the user has set `force_download=True` or if `hf_transfer` is enabled, then we should
        # not resume the download => delete the incomplete file.
        message = f"Removing incomplete file '{incomplete_path}'"
        if force_download:
            message += " (force_download=True)"
        elif constants.HF_HUB_ENABLE_HF_TRANSFER and not proxies:
            message += " (hf_transfer=True)"
        logger.info(message)
        incomplete_path.unlink(missing_ok=True)

    with incomplete_path.open("ab") as f:
        resume_size = f.tell()
        message = f"Downloading '{filename}' to '{incomplete_path}'"
        if resume_size > 0 and expected_size is not None:
            message += f" (resume from {resume_size}/{expected_size})"
        logger.info(message)

        if expected_size is not None:  # might be None if HTTP header not set correctly
            # Check disk space in both tmp and destination path
            _check_disk_space(expected_size, incomplete_path.parent)
            _check_disk_space(expected_size, destination_path.parent)

        http_get(
            url_to_download,
            f,
            proxies=proxies,
            resume_size=resume_size,
            headers=headers,
            expected_size=expected_size,
        )

    logger.info(f"Download complete. Moving file to {destination_path}")
    _chmod_and_move(incomplete_path, destination_path)

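The resume logic above hinges on opening the `.incomplete` file in append mode. A tiny standalone sketch (with a hypothetical file name) of why `f.tell()` yields the resume offset that is then passed to `http_get`:

```python
from pathlib import Path

incomplete = Path("model.safetensors.incomplete")  # hypothetical path
with incomplete.open("ab") as f:
    resume_size = f.tell()  # 0 on a fresh download, >0 after an interrupted one
print(f"would resume from byte {resume_size}")
```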
def _int_or_none(value: Optional[str]) -> Optional[int]:
    try:
        return int(value)  # type: ignore
    except (TypeError, ValueError):
        return None

def _chmod_and_move(src: Path, dst: Path) -> None:
    """Set correct permission before moving a blob from tmp directory to cache dir.

    Do not take into account the `umask` from the process as there is no convenient way
    to get it that is thread-safe.

    See:
    - About umask: https://docs.python.org/3/library/os.html#os.umask
    - Thread-safety: https://stackoverflow.com/a/70343066
    - About solution: https://github.com/huggingface/huggingface_hub/pull/1220#issuecomment-1326211591
    - Fix issue: https://github.com/huggingface/huggingface_hub/issues/1141
    - Fix issue: https://github.com/huggingface/huggingface_hub/issues/1215
    """
    # Get umask by creating a temporary file in the cached repo folder.
    tmp_file = dst.parent.parent / f"tmp_{uuid.uuid4()}"
    try:
        tmp_file.touch()
        cache_dir_mode = Path(tmp_file).stat().st_mode
        os.chmod(str(src), stat.S_IMODE(cache_dir_mode))
    except OSError as e:
        logger.warning(
            f"Could not set the permissions on the file '{src}'. Error: {e}.\nContinuing without setting permissions."
        )
    finally:
        try:
            tmp_file.unlink()
        except OSError:
            # fails if `tmp_file.touch()` failed => do nothing
            # See https://github.com/huggingface/huggingface_hub/issues/2359
            pass

    shutil.move(str(src), str(dst), copy_function=_copy_no_matter_what)

def _copy_no_matter_what(src: str, dst: str) -> None:
    """Copy file from src to dst.

    If `shutil.copy2` fails, fallback to `shutil.copyfile`.
    """
    try:
        # Copy file with metadata and permission
        # Can fail e.g. if dst is an S3 mount
        shutil.copy2(src, dst)
    except OSError:
        # Copy only file content
        shutil.copyfile(src, dst)

def _get_pointer_path(storage_folder: str, revision: str, relative_filename: str) -> str:
    # Using `os.path.abspath` instead of `Path.resolve()` to avoid resolving symlinks
    snapshot_path = os.path.join(storage_folder, "snapshots")
    pointer_path = os.path.join(snapshot_path, revision, relative_filename)
    if Path(os.path.abspath(snapshot_path)) not in Path(os.path.abspath(pointer_path)).parents:
        raise ValueError(
            "Invalid pointer path: cannot create pointer path in snapshot folder if"
            f" `storage_folder='{storage_folder}'`, `revision='{revision}'` and"
            f" `relative_filename='{relative_filename}'`."
        )
    return pointer_path
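A quick illustration of the traversal guard in `_get_pointer_path` (illustrative paths; the function is module-internal):

```python
print(_get_pointer_path("/cache/models--user--m", "abc123", "config.json"))
# /cache/models--user--m/snapshots/abc123/config.json

try:
    _get_pointer_path("/cache/models--user--m", "abc123", "../../etc/passwd")
except ValueError as e:
    print("rejected:", e)  # ".." would escape the snapshots folder
```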
parrot/lib/python3.10/site-packages/huggingface_hub/hf_api.py
ADDED
The diff for this file is too large to render. See raw diff.
parrot/lib/python3.10/site-packages/huggingface_hub/hf_file_system.py
ADDED
@@ -0,0 +1,1140 @@
import os
import re
import tempfile
from collections import deque
from dataclasses import dataclass, field
from datetime import datetime
from itertools import chain
from pathlib import Path
from typing import Any, Dict, Iterator, List, NoReturn, Optional, Tuple, Union
from urllib.parse import quote, unquote

import fsspec
from fsspec.callbacks import _DEFAULT_CALLBACK, NoOpCallback, TqdmCallback
from fsspec.utils import isfilelike
from requests import Response

from . import constants
from ._commit_api import CommitOperationCopy, CommitOperationDelete
from .errors import EntryNotFoundError, RepositoryNotFoundError, RevisionNotFoundError
from .file_download import hf_hub_url, http_get
from .hf_api import HfApi, LastCommitInfo, RepoFile
from .utils import HFValidationError, hf_raise_for_status, http_backoff


# Regex used to match special revisions with "/" in them (see #1710)
SPECIAL_REFS_REVISION_REGEX = re.compile(
    r"""
    (^refs\/convert\/\w+)  # `refs/convert/parquet` revisions
    |
    (^refs\/pr\/\d+)       # PR revisions
    """,
    re.VERBOSE,
)

@dataclass
class HfFileSystemResolvedPath:
    """Data structure containing information about a resolved Hugging Face file system path."""

    repo_type: str
    repo_id: str
    revision: str
    path_in_repo: str
    # The part placed after '@' in the initial path. It can be a quoted or unquoted refs revision.
    # Used to reconstruct the unresolved path to return to the user.
    _raw_revision: Optional[str] = field(default=None, repr=False)

    def unresolve(self) -> str:
        repo_path = constants.REPO_TYPES_URL_PREFIXES.get(self.repo_type, "") + self.repo_id
        if self._raw_revision:
            return f"{repo_path}@{self._raw_revision}/{self.path_in_repo}".rstrip("/")
        elif self.revision != constants.DEFAULT_REVISION:
            return f"{repo_path}@{safe_revision(self.revision)}/{self.path_in_repo}".rstrip("/")
        else:
            return f"{repo_path}/{self.path_in_repo}".rstrip("/")

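A round-trip sketch of `unresolve()` with illustrative values; when the revision equals the default one, no `@revision` segment is emitted.

```python
p = HfFileSystemResolvedPath(
    repo_type="dataset", repo_id="user/my-dataset", revision="main", path_in_repo="data.json"
)
print(p.unresolve())  # datasets/user/my-dataset/data.json (assuming "main" is the default revision)
```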
class HfFileSystem(fsspec.AbstractFileSystem):
    """
    Access a remote Hugging Face Hub repository as if it were a local file system.

    <Tip warning={true}>

    [`HfFileSystem`] provides fsspec compatibility, which is useful for libraries that require it (e.g., reading
    Hugging Face datasets directly with `pandas`). However, it introduces additional overhead due to this compatibility
    layer. For better performance and reliability, it's recommended to use `HfApi` methods when possible.

    </Tip>

    Args:
        token (`str` or `bool`, *optional*):
            A valid user access token (string). Defaults to the locally saved
            token, which is the recommended method for authentication (see
            https://huggingface.co/docs/huggingface_hub/quick-start#authentication).
            To disable authentication, pass `False`.
        endpoint (`str`, *optional*):
            Endpoint of the Hub. Defaults to <https://huggingface.co>.

    Usage:

    ```python
    >>> from huggingface_hub import HfFileSystem

    >>> fs = HfFileSystem()

    >>> # List files
    >>> fs.glob("my-username/my-model/*.bin")
    ['my-username/my-model/pytorch_model.bin']
    >>> fs.ls("datasets/my-username/my-dataset", detail=False)
    ['datasets/my-username/my-dataset/.gitattributes', 'datasets/my-username/my-dataset/README.md', 'datasets/my-username/my-dataset/data.json']

    >>> # Read/write files
    >>> with fs.open("my-username/my-model/pytorch_model.bin") as f:
    ...     data = f.read()
    >>> with fs.open("my-username/my-model/pytorch_model.bin", "wb") as f:
    ...     f.write(data)
    ```
    """

    root_marker = ""
    protocol = "hf"

    def __init__(
        self,
        *args,
        endpoint: Optional[str] = None,
        token: Union[bool, str, None] = None,
        **storage_options,
    ):
        super().__init__(*args, **storage_options)
        self.endpoint = endpoint or constants.ENDPOINT
        self.token = token
        self._api = HfApi(endpoint=endpoint, token=token)
        # Maps (repo_type, repo_id, revision) to a 2-tuple with:
        #  * the 1st element indicating whether the repository and the revision exist
        #  * the 2nd element being the exception raised if the repository or revision doesn't exist
        self._repo_and_revision_exists_cache: Dict[
            Tuple[str, str, Optional[str]], Tuple[bool, Optional[Exception]]
        ] = {}

    def _repo_and_revision_exist(
        self, repo_type: str, repo_id: str, revision: Optional[str]
    ) -> Tuple[bool, Optional[Exception]]:
        if (repo_type, repo_id, revision) not in self._repo_and_revision_exists_cache:
            try:
                self._api.repo_info(
                    repo_id, revision=revision, repo_type=repo_type, timeout=constants.HF_HUB_ETAG_TIMEOUT
                )
            except (RepositoryNotFoundError, HFValidationError) as e:
                self._repo_and_revision_exists_cache[(repo_type, repo_id, revision)] = False, e
                self._repo_and_revision_exists_cache[(repo_type, repo_id, None)] = False, e
            except RevisionNotFoundError as e:
                self._repo_and_revision_exists_cache[(repo_type, repo_id, revision)] = False, e
                self._repo_and_revision_exists_cache[(repo_type, repo_id, None)] = True, None
            else:
                self._repo_and_revision_exists_cache[(repo_type, repo_id, revision)] = True, None
                self._repo_and_revision_exists_cache[(repo_type, repo_id, None)] = True, None
        return self._repo_and_revision_exists_cache[(repo_type, repo_id, revision)]

    def resolve_path(self, path: str, revision: Optional[str] = None) -> HfFileSystemResolvedPath:
        """
        Resolve a Hugging Face file system path into its components.

        Args:
            path (`str`):
                Path to resolve.
            revision (`str`, *optional*):
                The revision of the repo to resolve. Defaults to the revision specified in the path.

        Returns:
            [`HfFileSystemResolvedPath`]: Resolved path information containing `repo_type`, `repo_id`, `revision` and `path_in_repo`.

        Raises:
            `ValueError`:
                If path contains conflicting revision information.
            `NotImplementedError`:
                If trying to list repositories.
        """

        def _align_revision_in_path_with_revision(
            revision_in_path: Optional[str], revision: Optional[str]
        ) -> Optional[str]:
            if revision is not None:
                if revision_in_path is not None and revision_in_path != revision:
                    raise ValueError(
                        f'Revision specified in path ("{revision_in_path}") and in `revision` argument ("{revision}")'
                        " are not the same."
                    )
            else:
                revision = revision_in_path
            return revision

        path = self._strip_protocol(path)
        if not path:
            # can't list repositories at root
            raise NotImplementedError("Access to repositories lists is not implemented.")
        elif path.split("/")[0] + "/" in constants.REPO_TYPES_URL_PREFIXES.values():
            if "/" not in path:
                # can't list repositories at the repository type level
                raise NotImplementedError("Access to repositories lists is not implemented.")
            repo_type, path = path.split("/", 1)
            repo_type = constants.REPO_TYPES_MAPPING[repo_type]
        else:
            repo_type = constants.REPO_TYPE_MODEL
        if path.count("/") > 0:
            if "@" in path:
                repo_id, revision_in_path = path.split("@", 1)
                if "/" in revision_in_path:
                    match = SPECIAL_REFS_REVISION_REGEX.search(revision_in_path)
                    if match is not None and revision in (None, match.group()):
                        # Handle `refs/convert/parquet` and PR revisions separately
                        path_in_repo = SPECIAL_REFS_REVISION_REGEX.sub("", revision_in_path).lstrip("/")
                        revision_in_path = match.group()
                    else:
                        revision_in_path, path_in_repo = revision_in_path.split("/", 1)
                else:
                    path_in_repo = ""
                revision = _align_revision_in_path_with_revision(unquote(revision_in_path), revision)
                repo_and_revision_exist, err = self._repo_and_revision_exist(repo_type, repo_id, revision)
                if not repo_and_revision_exist:
                    _raise_file_not_found(path, err)
            else:
                revision_in_path = None
                repo_id_with_namespace = "/".join(path.split("/")[:2])
                path_in_repo_with_namespace = "/".join(path.split("/")[2:])
                repo_id_without_namespace = path.split("/")[0]
                path_in_repo_without_namespace = "/".join(path.split("/")[1:])
                repo_id = repo_id_with_namespace
                path_in_repo = path_in_repo_with_namespace
                repo_and_revision_exist, err = self._repo_and_revision_exist(repo_type, repo_id, revision)
                if not repo_and_revision_exist:
                    if isinstance(err, (RepositoryNotFoundError, HFValidationError)):
                        repo_id = repo_id_without_namespace
                        path_in_repo = path_in_repo_without_namespace
                        repo_and_revision_exist, _ = self._repo_and_revision_exist(repo_type, repo_id, revision)
                        if not repo_and_revision_exist:
                            _raise_file_not_found(path, err)
                    else:
                        _raise_file_not_found(path, err)
        else:
            repo_id = path
            path_in_repo = ""
            if "@" in path:
                repo_id, revision_in_path = path.split("@", 1)
                revision = _align_revision_in_path_with_revision(unquote(revision_in_path), revision)
            else:
                revision_in_path = None
            repo_and_revision_exist, _ = self._repo_and_revision_exist(repo_type, repo_id, revision)
            if not repo_and_revision_exist:
                raise NotImplementedError("Access to repositories lists is not implemented.")

        revision = revision if revision is not None else constants.DEFAULT_REVISION
        return HfFileSystemResolvedPath(repo_type, repo_id, revision, path_in_repo, _raw_revision=revision_in_path)

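The accepted path forms, sketched with placeholder repos (each call validates the repo and revision against the Hub, so these require network access):

```python
fs = HfFileSystem()
fs.resolve_path("user/my-model/config.json")                 # default revision
fs.resolve_path("user/my-model@refs%2Fpr%2F1/config.json")   # quoted special revision
fs.resolve_path("datasets/user/my-dataset@main/data.json")   # explicit repo type + revision
```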
    def invalidate_cache(self, path: Optional[str] = None) -> None:
        """
        Clear the cache for a given path.

        For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.invalidate_cache).

        Args:
            path (`str`, *optional*):
                Path to clear from cache. If not provided, clear the entire cache.
        """
        if not path:
            self.dircache.clear()
            self._repo_and_revision_exists_cache.clear()
        else:
            resolved_path = self.resolve_path(path)
            path = resolved_path.unresolve()
            while path:
                self.dircache.pop(path, None)
                path = self._parent(path)

            # Only clear repo cache if path is to repo root
            if not resolved_path.path_in_repo:
                self._repo_and_revision_exists_cache.pop((resolved_path.repo_type, resolved_path.repo_id, None), None)
                self._repo_and_revision_exists_cache.pop(
                    (resolved_path.repo_type, resolved_path.repo_id, resolved_path.revision), None
                )

    def _open(
        self,
        path: str,
        mode: str = "rb",
        revision: Optional[str] = None,
        block_size: Optional[int] = None,
        **kwargs,
    ) -> "HfFileSystemFile":
        if "a" in mode:
            raise NotImplementedError("Appending to remote files is not yet supported.")
        if block_size == 0:
            return HfFileSystemStreamFile(self, path, mode=mode, revision=revision, block_size=block_size, **kwargs)
        else:
            return HfFileSystemFile(self, path, mode=mode, revision=revision, block_size=block_size, **kwargs)

    def _rm(self, path: str, revision: Optional[str] = None, **kwargs) -> None:
        resolved_path = self.resolve_path(path, revision=revision)
        self._api.delete_file(
            path_in_repo=resolved_path.path_in_repo,
            repo_id=resolved_path.repo_id,
            token=self.token,
            repo_type=resolved_path.repo_type,
            revision=resolved_path.revision,
            commit_message=kwargs.get("commit_message"),
            commit_description=kwargs.get("commit_description"),
        )
        self.invalidate_cache(path=resolved_path.unresolve())

    def rm(
        self,
        path: str,
        recursive: bool = False,
        maxdepth: Optional[int] = None,
        revision: Optional[str] = None,
        **kwargs,
    ) -> None:
        """
        Delete files from a repository.

        For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.rm).

        <Tip warning={true}>

        Note: When possible, use `HfApi.delete_file()` for better performance.

        </Tip>

        Args:
            path (`str`):
                Path to delete.
            recursive (`bool`, *optional*):
                If True, delete directory and all its contents. Defaults to False.
            maxdepth (`int`, *optional*):
                Maximum number of subdirectories to visit when deleting recursively.
            revision (`str`, *optional*):
                The git revision to delete from.
        """
        resolved_path = self.resolve_path(path, revision=revision)
        paths = self.expand_path(path, recursive=recursive, maxdepth=maxdepth, revision=revision)
        paths_in_repo = [self.resolve_path(path).path_in_repo for path in paths if not self.isdir(path)]
        operations = [CommitOperationDelete(path_in_repo=path_in_repo) for path_in_repo in paths_in_repo]
        commit_message = f"Delete {path} "
        commit_message += "recursively " if recursive else ""
        commit_message += f"up to depth {maxdepth} " if maxdepth is not None else ""
        # TODO: use `commit_description` to list all the deleted paths?
        self._api.create_commit(
            repo_id=resolved_path.repo_id,
            repo_type=resolved_path.repo_type,
            token=self.token,
            operations=operations,
            revision=resolved_path.revision,
            commit_message=kwargs.get("commit_message", commit_message),
            commit_description=kwargs.get("commit_description"),
        )
        self.invalidate_cache(path=resolved_path.unresolve())

    def ls(
        self, path: str, detail: bool = True, refresh: bool = False, revision: Optional[str] = None, **kwargs
    ) -> List[Union[str, Dict[str, Any]]]:
        """
        List the contents of a directory.

        For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.ls).

        <Tip warning={true}>

        Note: When possible, use `HfApi.list_repo_tree()` for better performance.

        </Tip>

        Args:
            path (`str`):
                Path to the directory.
            detail (`bool`, *optional*):
                If True, returns a list of dictionaries containing file information. If False,
                returns a list of file paths. Defaults to True.
            refresh (`bool`, *optional*):
                If True, bypass the cache and fetch the latest data. Defaults to False.
            revision (`str`, *optional*):
                The git revision to list from.

        Returns:
            `List[Union[str, Dict[str, Any]]]`: List of file paths (if detail=False) or list of file information
            dictionaries (if detail=True).
        """
        resolved_path = self.resolve_path(path, revision=revision)
        path = resolved_path.unresolve()
        kwargs = {"expand_info": detail, **kwargs}
        try:
            out = self._ls_tree(path, refresh=refresh, revision=revision, **kwargs)
        except EntryNotFoundError:
            # Path could be a file
            if not resolved_path.path_in_repo:
                _raise_file_not_found(path, None)
            out = self._ls_tree(self._parent(path), refresh=refresh, revision=revision, **kwargs)
            out = [o for o in out if o["name"] == path]
            if len(out) == 0:
                _raise_file_not_found(path, None)
        return out if detail else [o["name"] for o in out]

    def _ls_tree(
        self,
        path: str,
        recursive: bool = False,
        refresh: bool = False,
        revision: Optional[str] = None,
        expand_info: bool = True,
    ):
        resolved_path = self.resolve_path(path, revision=revision)
        path = resolved_path.unresolve()
        root_path = HfFileSystemResolvedPath(
            resolved_path.repo_type,
            resolved_path.repo_id,
            resolved_path.revision,
            path_in_repo="",
            _raw_revision=resolved_path._raw_revision,
        ).unresolve()

        out = []
        if path in self.dircache and not refresh:
            cached_path_infos = self.dircache[path]
            out.extend(cached_path_infos)
            dirs_not_in_dircache = []
            if recursive:
                # Use BFS to traverse the cache and build the "recursive" output
                # (The Hub uses a so-called "tree first" strategy for the tree endpoint but we sort the output to follow the spec so the result is (eventually) the same)
                dirs_to_visit = deque(
                    [path_info for path_info in cached_path_infos if path_info["type"] == "directory"]
                )
                while dirs_to_visit:
                    dir_info = dirs_to_visit.popleft()
                    if dir_info["name"] not in self.dircache:
                        dirs_not_in_dircache.append(dir_info["name"])
                    else:
                        cached_path_infos = self.dircache[dir_info["name"]]
                        out.extend(cached_path_infos)
                        dirs_to_visit.extend(
                            [path_info for path_info in cached_path_infos if path_info["type"] == "directory"]
                        )

            dirs_not_expanded = []
            if expand_info:
                # Check if there are directories with non-expanded entries
                dirs_not_expanded = [self._parent(o["name"]) for o in out if o["last_commit"] is None]

            if (recursive and dirs_not_in_dircache) or (expand_info and dirs_not_expanded):
                # If the dircache is incomplete, find the common path of the missing and non-expanded entries
                # and extend the output with the result of `_ls_tree(common_path, recursive=True)`
                common_prefix = os.path.commonprefix(dirs_not_in_dircache + dirs_not_expanded)
                # Get the parent directory if the common prefix itself is not a directory
                common_path = (
                    common_prefix.rstrip("/")
                    if common_prefix.endswith("/")
                    or common_prefix == root_path
                    or common_prefix in chain(dirs_not_in_dircache, dirs_not_expanded)
                    else self._parent(common_prefix)
                )
                out = [o for o in out if not o["name"].startswith(common_path + "/")]
                for cached_path in self.dircache:
                    if cached_path.startswith(common_path + "/"):
                        self.dircache.pop(cached_path, None)
                self.dircache.pop(common_path, None)
                out.extend(
                    self._ls_tree(
                        common_path,
                        recursive=recursive,
                        refresh=True,
                        revision=revision,
                        expand_info=expand_info,
                    )
                )
        else:
            tree = self._api.list_repo_tree(
                resolved_path.repo_id,
                resolved_path.path_in_repo,
                recursive=recursive,
                expand=expand_info,
                revision=resolved_path.revision,
                repo_type=resolved_path.repo_type,
            )
            for path_info in tree:
                if isinstance(path_info, RepoFile):
                    cache_path_info = {
                        "name": root_path + "/" + path_info.path,
                        "size": path_info.size,
                        "type": "file",
                        "blob_id": path_info.blob_id,
                        "lfs": path_info.lfs,
                        "last_commit": path_info.last_commit,
                        "security": path_info.security,
                    }
                else:
                    cache_path_info = {
                        "name": root_path + "/" + path_info.path,
                        "size": 0,
                        "type": "directory",
                        "tree_id": path_info.tree_id,
                        "last_commit": path_info.last_commit,
                    }
                parent_path = self._parent(cache_path_info["name"])
                self.dircache.setdefault(parent_path, []).append(cache_path_info)
                out.append(cache_path_info)
        return out

    def walk(self, path: str, *args, **kwargs) -> Iterator[Tuple[str, List[str], List[str]]]:
        """
        Return all files below the given path.

        For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.walk).

        Args:
            path (`str`):
                Root path to list files from.

        Returns:
            `Iterator[Tuple[str, List[str], List[str]]]`: An iterator of (path, list of directory names, list of file names) tuples.
        """
        # Set expand_info=False by default to get a ~10x speed boost
        kwargs = {"expand_info": kwargs.get("detail", False), **kwargs}
        path = self.resolve_path(path, revision=kwargs.get("revision")).unresolve()
        yield from super().walk(path, *args, **kwargs)

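    # Illustrative usage sketch (not from the source; assumes a public model repo
    # "gpt2" exists on the Hub):
    #
    #     fs = HfFileSystem()
    #     for root, dirnames, filenames in fs.walk("gpt2"):
    #         print(root, filenames)
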
    def glob(self, path: str, **kwargs) -> List[str]:
        """
        Find files by glob-matching.

        For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.glob).

        Args:
            path (`str`):
                Path pattern to match.

        Returns:
            `List[str]`: List of paths matching the pattern.
        """
        # Set expand_info=False by default to get a ~10x speed boost
        kwargs = {"expand_info": kwargs.get("detail", False), **kwargs}
        path = self.resolve_path(path, revision=kwargs.get("revision")).unresolve()
        return super().glob(path, **kwargs)

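    # Illustrative usage sketch (not from the source; assumes a public model repo
    # "gpt2" with JSON files at its root):
    #
    #     fs = HfFileSystem()
    #     fs.glob("gpt2/*.json")  # e.g. ["gpt2/config.json", "gpt2/tokenizer.json"]
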
    def find(
        self,
        path: str,
        maxdepth: Optional[int] = None,
        withdirs: bool = False,
        detail: bool = False,
        refresh: bool = False,
        revision: Optional[str] = None,
        **kwargs,
    ) -> Union[List[str], Dict[str, Dict[str, Any]]]:
        """
        List all files below path.

        For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.find).

        Args:
            path (`str`):
                Root path to list files from.
            maxdepth (`int`, *optional*):
                Maximum depth to descend into subdirectories.
            withdirs (`bool`, *optional*):
                Include directory paths in the output. Defaults to False.
            detail (`bool`, *optional*):
                If True, returns a dict mapping paths to file information. Defaults to False.
            refresh (`bool`, *optional*):
                If True, bypass the cache and fetch the latest data. Defaults to False.
            revision (`str`, *optional*):
                The git revision to list from.

        Returns:
            `Union[List[str], Dict[str, Dict[str, Any]]]`: List of paths or dict of file information.
        """
        if maxdepth:
            return super().find(
                path, maxdepth=maxdepth, withdirs=withdirs, detail=detail, refresh=refresh, revision=revision, **kwargs
            )
        resolved_path = self.resolve_path(path, revision=revision)
        path = resolved_path.unresolve()
        kwargs = {"expand_info": detail, **kwargs}
        try:
            out = self._ls_tree(path, recursive=True, refresh=refresh, revision=resolved_path.revision, **kwargs)
        except EntryNotFoundError:
            # Path could be a file
            if self.info(path, revision=revision, **kwargs)["type"] == "file":
                out = {path: {}}
            else:
                out = {}
        else:
            if not withdirs:
                out = [o for o in out if o["type"] != "directory"]
            else:
                # If `withdirs=True`, include the directory itself to be consistent with the spec
                path_info = self.info(path, revision=resolved_path.revision, **kwargs)
                out = [path_info] + out if path_info["type"] == "directory" else out
            out = {o["name"]: o for o in out}
        names = sorted(out)
        if not detail:
            return names
        else:
            return {name: out[name] for name in names}

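    # Illustrative usage sketch (not from the source; assumes a public model repo
    # "gpt2" exists on the Hub):
    #
    #     fs = HfFileSystem()
    #     fs.find("gpt2")               # flat list of all file paths in the repo
    #     fs.find("gpt2", detail=True)  # dict mapping each path to its info dict
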
    def cp_file(self, path1: str, path2: str, revision: Optional[str] = None, **kwargs) -> None:
        """
        Copy a file within or between repositories.

        <Tip warning={true}>

        Note: When possible, use `HfApi.upload_file()` for better performance.

        </Tip>

        Args:
            path1 (`str`):
                Source path to copy from.
            path2 (`str`):
                Destination path to copy to.
            revision (`str`, *optional*):
                The git revision to copy from.
        """
        resolved_path1 = self.resolve_path(path1, revision=revision)
        resolved_path2 = self.resolve_path(path2, revision=revision)

        same_repo = (
            resolved_path1.repo_type == resolved_path2.repo_type and resolved_path1.repo_id == resolved_path2.repo_id
        )

        if same_repo:
            commit_message = f"Copy {path1} to {path2}"
            self._api.create_commit(
                repo_id=resolved_path1.repo_id,
                repo_type=resolved_path1.repo_type,
                revision=resolved_path2.revision,
                commit_message=kwargs.get("commit_message", commit_message),
                commit_description=kwargs.get("commit_description", ""),
                operations=[
                    CommitOperationCopy(
                        src_path_in_repo=resolved_path1.path_in_repo,
                        path_in_repo=resolved_path2.path_in_repo,
                        src_revision=resolved_path1.revision,
                    )
                ],
            )
        else:
            with self.open(path1, "rb", revision=resolved_path1.revision) as f:
                content = f.read()
            commit_message = f"Copy {path1} to {path2}"
            self._api.upload_file(
                path_or_fileobj=content,
                path_in_repo=resolved_path2.path_in_repo,
                repo_id=resolved_path2.repo_id,
                token=self.token,
                repo_type=resolved_path2.repo_type,
                revision=resolved_path2.revision,
                commit_message=kwargs.get("commit_message", commit_message),
                commit_description=kwargs.get("commit_description"),
            )
        self.invalidate_cache(path=resolved_path1.unresolve())
        self.invalidate_cache(path=resolved_path2.unresolve())

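    # Illustrative usage sketch (not from the source; repo name and token are
    # placeholders, and writing requires a token with write access):
    #
    #     fs = HfFileSystem(token="hf_...")
    #     fs.cp_file("datasets/my-user/my-repo/data.csv", "datasets/my-user/my-repo/data-copy.csv")
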
    def modified(self, path: str, **kwargs) -> datetime:
        """
        Get the last modified time of a file.

        For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.modified).

        Args:
            path (`str`):
                Path to the file.

        Returns:
            `datetime`: Last commit date of the file.
        """
        info = self.info(path, **kwargs)
        return info["last_commit"]["date"]

    def info(self, path: str, refresh: bool = False, revision: Optional[str] = None, **kwargs) -> Dict[str, Any]:
        """
        Get information about a file or directory.

        For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.info).

        <Tip warning={true}>

        Note: When possible, use `HfApi.get_paths_info()` or `HfApi.repo_info()` for better performance.

        </Tip>

        Args:
            path (`str`):
                Path to get info for.
            refresh (`bool`, *optional*):
                If True, bypass the cache and fetch the latest data. Defaults to False.
            revision (`str`, *optional*):
                The git revision to get info from.

        Returns:
            `Dict[str, Any]`: Dictionary containing file information (type, size, commit info, etc.).
        """
        resolved_path = self.resolve_path(path, revision=revision)
        path = resolved_path.unresolve()
        expand_info = kwargs.get(
            "expand_info", True
        )  # don't expose it as a parameter in the public API to follow the spec
        if not resolved_path.path_in_repo:
            # Path is the root directory
            out = {
                "name": path,
                "size": 0,
                "type": "directory",
            }
            if expand_info:
                last_commit = self._api.list_repo_commits(
                    resolved_path.repo_id, repo_type=resolved_path.repo_type, revision=resolved_path.revision
                )[-1]
                out = {
                    **out,
                    "tree_id": None,  # TODO: tree_id of the root directory?
                    "last_commit": LastCommitInfo(
                        oid=last_commit.commit_id, title=last_commit.title, date=last_commit.created_at
                    ),
                }
        else:
            out = None
            parent_path = self._parent(path)
            if not expand_info and parent_path not in self.dircache:
                # Fill the cache with a cheap call
                self.ls(parent_path, expand_info=False)
            if parent_path in self.dircache:
                # Check if the path is in the cache
                out1 = [o for o in self.dircache[parent_path] if o["name"] == path]
                if not out1:
                    _raise_file_not_found(path, None)
                out = out1[0]
            if refresh or out is None or (expand_info and out and out["last_commit"] is None):
                paths_info = self._api.get_paths_info(
                    resolved_path.repo_id,
                    resolved_path.path_in_repo,
                    expand=expand_info,
                    revision=resolved_path.revision,
                    repo_type=resolved_path.repo_type,
                )
                if not paths_info:
                    _raise_file_not_found(path, None)
                path_info = paths_info[0]
                root_path = HfFileSystemResolvedPath(
                    resolved_path.repo_type,
                    resolved_path.repo_id,
                    resolved_path.revision,
                    path_in_repo="",
                    _raw_revision=resolved_path._raw_revision,
                ).unresolve()
                if isinstance(path_info, RepoFile):
                    out = {
                        "name": root_path + "/" + path_info.path,
                        "size": path_info.size,
                        "type": "file",
                        "blob_id": path_info.blob_id,
                        "lfs": path_info.lfs,
                        "last_commit": path_info.last_commit,
                        "security": path_info.security,
                    }
                else:
                    out = {
                        "name": root_path + "/" + path_info.path,
                        "size": 0,
                        "type": "directory",
                        "tree_id": path_info.tree_id,
                        "last_commit": path_info.last_commit,
                    }
                if not expand_info:
                    out = {k: out[k] for k in ["name", "size", "type"]}
        assert out is not None
        return out

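    # Illustrative usage sketch (not from the source; assumes a public model repo
    # "gpt2" exists on the Hub):
    #
    #     fs = HfFileSystem()
    #     fs.info("gpt2/config.json")
    #     # e.g. {"name": "gpt2/config.json", "size": ..., "type": "file", ...}
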
    def exists(self, path, **kwargs):
        """
        Check if a file exists.

        For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.exists).

        <Tip warning={true}>

        Note: When possible, use `HfApi.file_exists()` for better performance.

        </Tip>

        Args:
            path (`str`):
                Path to check.

        Returns:
            `bool`: True if file exists, False otherwise.
        """
        try:
            if kwargs.get("refresh", False):
                self.invalidate_cache(path)

            self.info(path, **{**kwargs, "expand_info": False})
            return True
        except:  # noqa: E722
            return False

    def isdir(self, path):
        """
        Check if a path is a directory.

        For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.isdir).

        Args:
            path (`str`):
                Path to check.

        Returns:
            `bool`: True if path is a directory, False otherwise.
        """
        try:
            return self.info(path, expand_info=False)["type"] == "directory"
        except OSError:
            return False

    def isfile(self, path):
        """
        Check if a path is a file.

        For more details, refer to [fsspec documentation](https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.isfile).

        Args:
            path (`str`):
                Path to check.

        Returns:
            `bool`: True if path is a file, False otherwise.
        """
        try:
            return self.info(path, expand_info=False)["type"] == "file"
        except:  # noqa: E722
            return False

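    # Illustrative usage sketch for the three predicates above (not from the source;
    # assumes a public model repo "gpt2" exists on the Hub):
    #
    #     fs = HfFileSystem()
    #     fs.exists("gpt2/config.json")  # True
    #     fs.isfile("gpt2/config.json")  # True
    #     fs.isdir("gpt2/config.json")   # False
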
    def url(self, path: str) -> str:
        """
        Get the HTTP URL of the given path.

        Args:
            path (`str`):
                Path to get URL for.

        Returns:
            `str`: HTTP URL to access the file or directory on the Hub.
        """
        resolved_path = self.resolve_path(path)
        url = hf_hub_url(
            resolved_path.repo_id,
            resolved_path.path_in_repo,
            repo_type=resolved_path.repo_type,
            revision=resolved_path.revision,
            endpoint=self.endpoint,
        )
        if self.isdir(path):
            url = url.replace("/resolve/", "/tree/", 1)
        return url

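    # Illustrative usage sketch (not from the source; assumes the default endpoint
    # and a public model repo "gpt2"):
    #
    #     fs = HfFileSystem()
    #     fs.url("gpt2/config.json")
    #     # "https://huggingface.co/gpt2/resolve/main/config.json"
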
    def get_file(self, rpath, lpath, callback=_DEFAULT_CALLBACK, outfile=None, **kwargs) -> None:
        """
        Copy a single remote file to local.

        <Tip warning={true}>

        Note: When possible, use `HfApi.hf_hub_download()` for better performance.

        </Tip>

        Args:
            rpath (`str`):
                Remote path to download from.
            lpath (`str`):
                Local path to download to.
            callback (`Callback`, *optional*):
                Optional callback to track download progress. Defaults to no callback.
            outfile (`IO`, *optional*):
                Optional file-like object to write to. If provided, `lpath` is ignored.
        """
        revision = kwargs.get("revision")
        unhandled_kwargs = set(kwargs.keys()) - {"revision"}
        if not isinstance(callback, (NoOpCallback, TqdmCallback)) or len(unhandled_kwargs) > 0:
            # for now, let's not handle custom callbacks
            # and let's not handle custom kwargs
            return super().get_file(rpath, lpath, callback=callback, outfile=outfile, **kwargs)

        # Taken from https://github.com/fsspec/filesystem_spec/blob/47b445ae4c284a82dd15e0287b1ffc410e8fc470/fsspec/spec.py#L883
        if isfilelike(lpath):
            outfile = lpath
        elif self.isdir(rpath):
            os.makedirs(lpath, exist_ok=True)
            return None

        if isinstance(lpath, (str, Path)):  # otherwise, let's assume it's a file-like object
            os.makedirs(os.path.dirname(lpath), exist_ok=True)

        # Open file if not already open
        close_file = False
        if outfile is None:
            outfile = open(lpath, "wb")
            close_file = True
        initial_pos = outfile.tell()

        # Custom implementation of `get_file` to use `http_get`.
        resolve_remote_path = self.resolve_path(rpath, revision=revision)
        expected_size = self.info(rpath, revision=revision)["size"]
        callback.set_size(expected_size)
        try:
            http_get(
                url=hf_hub_url(
                    repo_id=resolve_remote_path.repo_id,
                    revision=resolve_remote_path.revision,
                    filename=resolve_remote_path.path_in_repo,
                    repo_type=resolve_remote_path.repo_type,
                    endpoint=self.endpoint,
                ),
                temp_file=outfile,
                displayed_filename=rpath,
                expected_size=expected_size,
                resume_size=0,
                headers=self._api._build_hf_headers(),
                _tqdm_bar=callback.tqdm if isinstance(callback, TqdmCallback) else None,
            )
            outfile.seek(initial_pos)
        finally:
            # Close file only if we opened it ourselves
            if close_file:
                outfile.close()

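    # Illustrative usage sketch (not from the source; assumes a public model repo
    # "gpt2" exists on the Hub and "/tmp/config.json" is writable):
    #
    #     fs = HfFileSystem()
    #     fs.get_file("gpt2/config.json", "/tmp/config.json")
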
    @property
    def transaction(self):
        """A context within which files are committed together upon exit.

        Requires the file class to implement `.commit()` and `.discard()`
        for the normal and exception cases.
        """
        # Taken from https://github.com/fsspec/filesystem_spec/blob/3fbb6fee33b46cccb015607630843dea049d3243/fsspec/spec.py#L231
        # See https://github.com/huggingface/huggingface_hub/issues/1733
        raise NotImplementedError("Transactional commits are not supported.")

    def start_transaction(self):
        """Begin write transaction for deferring files, non-context version."""
        # Taken from https://github.com/fsspec/filesystem_spec/blob/3fbb6fee33b46cccb015607630843dea049d3243/fsspec/spec.py#L241
        # See https://github.com/huggingface/huggingface_hub/issues/1733
        raise NotImplementedError("Transactional commits are not supported.")


class HfFileSystemFile(fsspec.spec.AbstractBufferedFile):
    def __init__(self, fs: HfFileSystem, path: str, revision: Optional[str] = None, **kwargs):
        try:
            self.resolved_path = fs.resolve_path(path, revision=revision)
        except FileNotFoundError as e:
            if "w" in kwargs.get("mode", ""):
                raise FileNotFoundError(
                    f"{e}.\nMake sure the repository and revision exist before writing data."
                ) from e
            raise
        # avoid an unnecessary .info() call with expensive expand_info=True to instantiate .details
        if kwargs.get("mode", "rb") == "rb":
            self.details = fs.info(self.resolved_path.unresolve(), expand_info=False)
        super().__init__(fs, self.resolved_path.unresolve(), **kwargs)
        self.fs: HfFileSystem

    def __del__(self):
        if not hasattr(self, "resolved_path"):
            # Means that the constructor failed. Nothing to do.
            return
        return super().__del__()

    def _fetch_range(self, start: int, end: int) -> bytes:
        headers = {
            "range": f"bytes={start}-{end - 1}",
            **self.fs._api._build_hf_headers(),
        }
        url = hf_hub_url(
            repo_id=self.resolved_path.repo_id,
            revision=self.resolved_path.revision,
            filename=self.resolved_path.path_in_repo,
            repo_type=self.resolved_path.repo_type,
            endpoint=self.fs.endpoint,
        )
        r = http_backoff(
            "GET",
            url,
            headers=headers,
            retry_on_status_codes=(500, 502, 503, 504),
            timeout=constants.HF_HUB_DOWNLOAD_TIMEOUT,
        )
        hf_raise_for_status(r)
        return r.content

    def _initiate_upload(self) -> None:
        self.temp_file = tempfile.NamedTemporaryFile(prefix="hffs-", delete=False)

    def _upload_chunk(self, final: bool = False) -> None:
        self.buffer.seek(0)
        block = self.buffer.read()
        self.temp_file.write(block)
        if final:
            self.temp_file.close()
            self.fs._api.upload_file(
                path_or_fileobj=self.temp_file.name,
                path_in_repo=self.resolved_path.path_in_repo,
                repo_id=self.resolved_path.repo_id,
                token=self.fs.token,
                repo_type=self.resolved_path.repo_type,
                revision=self.resolved_path.revision,
                commit_message=self.kwargs.get("commit_message"),
                commit_description=self.kwargs.get("commit_description"),
            )
            os.remove(self.temp_file.name)
            self.fs.invalidate_cache(
                path=self.resolved_path.unresolve(),
            )

    def read(self, length=-1):
        """Read remote file.

        If `length` is not provided or is -1, the entire file is downloaded and read. On POSIX systems and if
        `hf_transfer` is not enabled, the file is loaded in memory directly. Otherwise, the file is downloaded to a
        temporary file and read from there.
        """
        if self.mode == "rb" and (length is None or length == -1) and self.loc == 0:
            with self.fs.open(self.path, "rb", block_size=0) as f:  # block_size=0 enables fast streaming
                return f.read()
        return super().read(length)

    def url(self) -> str:
        return self.fs.url(self.path)


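# Illustrative usage sketch for the buffered file above (not from the source;
# read access assumes a public repo "gpt2", and the write example assumes a
# repo "my-user/my-repo" with write access):
#
#     fs = HfFileSystem()
#     with fs.open("gpt2/config.json", "rb") as f:  # returns an HfFileSystemFile
#         first_bytes = f.read(16)
#
#     with fs.open("my-user/my-repo/notes.txt", "wb") as f:
#         f.write(b"hello")  # uploaded as a commit when the file is closed

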
class HfFileSystemStreamFile(fsspec.spec.AbstractBufferedFile):
    def __init__(
        self,
        fs: HfFileSystem,
        path: str,
        mode: str = "rb",
        revision: Optional[str] = None,
        block_size: int = 0,
        cache_type: str = "none",
        **kwargs,
    ):
        if block_size != 0:
            raise ValueError(f"HfFileSystemStreamFile only supports block_size=0 but got {block_size}")
        if cache_type != "none":
            raise ValueError(f"HfFileSystemStreamFile only supports cache_type='none' but got {cache_type}")
        if "w" in mode:
            raise ValueError(f"HfFileSystemStreamFile only supports reading but got mode='{mode}'")
        try:
            self.resolved_path = fs.resolve_path(path, revision=revision)
        except FileNotFoundError as e:
            if "w" in kwargs.get("mode", ""):
                raise FileNotFoundError(
                    f"{e}.\nMake sure the repository and revision exist before writing data."
                ) from e
            raise  # re-raise so the error does not surface later as an AttributeError on resolved_path
        # avoid an unnecessary .info() call to instantiate .details
        self.details = {"name": self.resolved_path.unresolve(), "size": None}
        super().__init__(
            fs, self.resolved_path.unresolve(), mode=mode, block_size=block_size, cache_type=cache_type, **kwargs
        )
        self.response: Optional[Response] = None
        self.fs: HfFileSystem

    def seek(self, loc: int, whence: int = 0):
        if loc == 0 and whence == 1:
            return
        if loc == self.loc and whence == 0:
            return
        raise ValueError("Cannot seek streaming HF file")

    def read(self, length: int = -1):
        read_args = (length,) if length >= 0 else ()
        if self.response is None or self.response.raw.isclosed():
            url = hf_hub_url(
                repo_id=self.resolved_path.repo_id,
                revision=self.resolved_path.revision,
                filename=self.resolved_path.path_in_repo,
                repo_type=self.resolved_path.repo_type,
                endpoint=self.fs.endpoint,
            )
            self.response = http_backoff(
                "GET",
                url,
                headers=self.fs._api._build_hf_headers(),
                retry_on_status_codes=(500, 502, 503, 504),
                stream=True,
                timeout=constants.HF_HUB_DOWNLOAD_TIMEOUT,
            )
            hf_raise_for_status(self.response)
        try:
            out = self.response.raw.read(*read_args)
        except Exception:
            self.response.close()

            # Retry by recreating the connection
            url = hf_hub_url(
                repo_id=self.resolved_path.repo_id,
                revision=self.resolved_path.revision,
                filename=self.resolved_path.path_in_repo,
                repo_type=self.resolved_path.repo_type,
                endpoint=self.fs.endpoint,
            )
            self.response = http_backoff(
                "GET",
                url,
                headers={"Range": "bytes=%d-" % self.loc, **self.fs._api._build_hf_headers()},
                retry_on_status_codes=(500, 502, 503, 504),
                stream=True,
                timeout=constants.HF_HUB_DOWNLOAD_TIMEOUT,
            )
            hf_raise_for_status(self.response)
            try:
                out = self.response.raw.read(*read_args)
            except Exception:
                self.response.close()
                raise
        self.loc += len(out)
        return out

    def url(self) -> str:
        return self.fs.url(self.path)

    def __del__(self):
        if not hasattr(self, "resolved_path"):
            # Means that the constructor failed. Nothing to do.
            return
        return super().__del__()

    def __reduce__(self):
        return reopen, (self.fs, self.path, self.mode, self.blocksize, self.cache.name)


def safe_revision(revision: str) -> str:
    return revision if SPECIAL_REFS_REVISION_REGEX.match(revision) else safe_quote(revision)


def safe_quote(s: str) -> str:
    return quote(s, safe="")


def _raise_file_not_found(path: str, err: Optional[Exception]) -> NoReturn:
    msg = path
    if isinstance(err, RepositoryNotFoundError):
        msg = f"{path} (repository not found)"
    elif isinstance(err, RevisionNotFoundError):
        msg = f"{path} (revision not found)"
    elif isinstance(err, HFValidationError):
        msg = f"{path} (invalid repository id)"
    raise FileNotFoundError(msg) from err


def reopen(fs: HfFileSystem, path: str, mode: str, block_size: int, cache_type: str):
    return fs.open(path, mode=mode, block_size=block_size, cache_type=cache_type)

parrot/lib/python3.10/site-packages/huggingface_hub/inference/__init__.py
ADDED
File without changes

parrot/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (177 Bytes)

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_client.py
ADDED
The diff for this file is too large to render. See raw diff

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_common.py
ADDED
@@ -0,0 +1,422 @@
# coding=utf-8
# Copyright 2023-present, the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains utilities used by both the sync and async inference clients."""

import base64
import io
import json
import logging
from contextlib import contextmanager
from dataclasses import dataclass
from pathlib import Path
from typing import (
    TYPE_CHECKING,
    Any,
    AsyncIterable,
    BinaryIO,
    ContextManager,
    Dict,
    Generator,
    Iterable,
    List,
    Literal,
    NoReturn,
    Optional,
    Union,
    overload,
)

from requests import HTTPError

from huggingface_hub.errors import (
    GenerationError,
    IncompleteGenerationError,
    OverloadedError,
    TextGenerationError,
    UnknownError,
    ValidationError,
)

from ..utils import get_session, is_aiohttp_available, is_numpy_available, is_pillow_available
from ._generated.types import ChatCompletionStreamOutput, TextGenerationStreamOutput


if TYPE_CHECKING:
    from aiohttp import ClientResponse, ClientSession
    from PIL.Image import Image

# TYPES
UrlT = str
PathT = Union[str, Path]
BinaryT = Union[bytes, BinaryIO]
ContentT = Union[BinaryT, PathT, UrlT]

# Used to set an Accept: image/png header
TASKS_EXPECTING_IMAGES = {"text-to-image", "image-to-image"}

logger = logging.getLogger(__name__)


@dataclass
class RequestParameters:
    url: str
    task: str
    model: Optional[str]
    json: Optional[Union[str, Dict, List]]
    data: Optional[ContentT]
    headers: Dict[str, Any]


# Add dataclass for ModelStatus. We use this dataclass in the get_model_status function.
@dataclass
class ModelStatus:
    """
    This dataclass represents the model status in the HF Inference API.

    Args:
        loaded (`bool`):
            If the model is currently loaded into HF's Inference API. Models
            are loaded on-demand, leading to the user's first request taking longer.
            If a model is loaded, you can be assured that it is in a healthy state.
        state (`str`):
            The current state of the model. This can be 'Loaded', 'Loadable', 'TooBig'.
            If a model's state is 'Loadable', it's not too big and has a supported
            backend. Loadable models are automatically loaded when the user first
            requests inference on the endpoint. This means it is transparent for the
            user to load a model, except that the first call takes longer to complete.
        compute_type (`Dict`):
            Information about the compute resource the model is using or will use, such as 'gpu' type and number of
            replicas.
        framework (`str`):
            The name of the framework that the model was built with, such as 'transformers'
            or 'text-generation-inference'.
    """

    loaded: bool
    state: str
    compute_type: Dict
    framework: str


## IMPORT UTILS


def _import_aiohttp():
    # Make sure `aiohttp` is installed on the machine.
    if not is_aiohttp_available():
        raise ImportError("Please install aiohttp to use `AsyncInferenceClient` (`pip install aiohttp`).")
    import aiohttp

    return aiohttp


def _import_numpy():
    """Make sure `numpy` is installed on the machine."""
    if not is_numpy_available():
        raise ImportError("Please install numpy to deal with embeddings (`pip install numpy`).")
    import numpy

    return numpy


def _import_pil_image():
    """Make sure `PIL` is installed on the machine."""
    if not is_pillow_available():
        raise ImportError(
            "Please install Pillow to deal with images (`pip install Pillow`). If you don't want the image to be"
            " post-processed, use `client.post(...)` and get the raw response from the server."
        )
    from PIL import Image

    return Image


## ENCODING / DECODING UTILS


@overload
def _open_as_binary(
    content: ContentT,
) -> ContextManager[BinaryT]: ...  # means "if input is not None, output is not None"


@overload
def _open_as_binary(
    content: Literal[None],
) -> ContextManager[Literal[None]]: ...  # means "if input is None, output is None"


@contextmanager  # type: ignore
def _open_as_binary(content: Optional[ContentT]) -> Generator[Optional[BinaryT], None, None]:
    """Open `content` as a binary file, either from a URL, a local path, or raw bytes.

    Do nothing if `content` is None.

    TODO: handle a PIL.Image as input
    TODO: handle base64 as input
    """
    # If content is a string => must be either a URL or a path
    if isinstance(content, str):
        if content.startswith("https://") or content.startswith("http://"):
            logger.debug(f"Downloading content from {content}")
            yield get_session().get(content).content  # TODO: retrieve as stream and pipe to post request?
            return
        content = Path(content)
        if not content.exists():
            raise FileNotFoundError(
                f"File not found at {content}. If `data` is a string, it must either be a URL or a path to a local"
                " file. To pass raw content, please encode it as bytes first."
            )

    # If content is a Path => open it
    if isinstance(content, Path):
        logger.debug(f"Opening content from {content}")
        with content.open("rb") as f:
            yield f
    else:
        # Otherwise: already a file-like object or None
        yield content


def _b64_encode(content: ContentT) -> str:
    """Encode a raw file (image, audio) into base64. Can be bytes, an opened file, a path or a URL."""
    with _open_as_binary(content) as data:
        data_as_bytes = data if isinstance(data, bytes) else data.read()
        return base64.b64encode(data_as_bytes).decode()


def _b64_to_image(encoded_image: str) -> "Image":
    """Parse a base64-encoded string into a PIL Image."""
    Image = _import_pil_image()
    return Image.open(io.BytesIO(base64.b64decode(encoded_image)))


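# Illustrative round-trip sketch for the two helpers above (not from the source;
# assumes Pillow is installed and "cat.png" exists locally):
#
#     encoded = _b64_encode("cat.png")  # base64 string from a local file
#     image = _b64_to_image(encoded)    # back to a PIL.Image.Image
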
def _bytes_to_list(content: bytes) -> List:
    """Parse bytes from a Response object into a Python list.

    Expects the response body to be JSON-encoded data.

    NOTE: This is exactly the same implementation as `_bytes_to_dict` and will not complain if the returned data is a
    dictionary. The only advantage of having both is to help the user (and mypy) understand what kind of data to expect.
    """
    return json.loads(content.decode())


def _bytes_to_dict(content: bytes) -> Dict:
    """Parse bytes from a Response object into a Python dictionary.

    Expects the response body to be JSON-encoded data.

    NOTE: This is exactly the same implementation as `_bytes_to_list` and will not complain if the returned data is a
    list. The only advantage of having both is to help the user (and mypy) understand what kind of data to expect.
    """
    return json.loads(content.decode())


def _bytes_to_image(content: bytes) -> "Image":
    """Parse bytes from a Response object into a PIL Image.

    Expects the response body to be raw bytes. To deal with b64 encoded images, use `_b64_to_image` instead.
    """
    Image = _import_pil_image()
    return Image.open(io.BytesIO(content))


def _as_dict(response: Union[bytes, Dict]) -> Dict:
    return json.loads(response) if isinstance(response, bytes) else response


## PAYLOAD UTILS


## STREAMING UTILS


def _stream_text_generation_response(
    bytes_output_as_lines: Iterable[bytes], details: bool
) -> Union[Iterable[str], Iterable[TextGenerationStreamOutput]]:
    """Used in `InferenceClient.text_generation`."""
    # Parse ServerSentEvents
    for byte_payload in bytes_output_as_lines:
        try:
            output = _format_text_generation_stream_output(byte_payload, details)
        except StopIteration:
            break
        if output is not None:
            yield output


async def _async_stream_text_generation_response(
    bytes_output_as_lines: AsyncIterable[bytes], details: bool
) -> Union[AsyncIterable[str], AsyncIterable[TextGenerationStreamOutput]]:
    """Used in `AsyncInferenceClient.text_generation`."""
    # Parse ServerSentEvents
    async for byte_payload in bytes_output_as_lines:
        try:
            output = _format_text_generation_stream_output(byte_payload, details)
        except StopIteration:
            break
        if output is not None:
            yield output


def _format_text_generation_stream_output(
    byte_payload: bytes, details: bool
) -> Optional[Union[str, TextGenerationStreamOutput]]:
    if not byte_payload.startswith(b"data:"):
        return None  # empty line

    if byte_payload.strip() == b"data: [DONE]":
        raise StopIteration("[DONE] signal received.")

    # Decode payload
    payload = byte_payload.decode("utf-8")
    json_payload = json.loads(payload.lstrip("data:").rstrip("\n"))

    # Either an error is being returned...
    if json_payload.get("error") is not None:
        raise _parse_text_generation_error(json_payload["error"], json_payload.get("error_type"))

    # ...or we parse the token payload
    output = TextGenerationStreamOutput.parse_obj_as_instance(json_payload)
    return output.token.text if not details else output


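# Illustrative sketch of the server-sent-event lines this parser expects
# (payload values are made up for the example):
#
#     b'data: {"index": 1, "token": {"id": 42, "text": "Hello", "logprob": -0.1, "special": false}}'
#     b''              # keep-alive / empty line -> returns None
#     b'data: [DONE]'  # end of stream -> raises StopIteration
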
def _stream_chat_completion_response(
    bytes_lines: Iterable[bytes],
) -> Iterable[ChatCompletionStreamOutput]:
    """Used in `InferenceClient.chat_completion` if model is served with TGI."""
    for item in bytes_lines:
        try:
            output = _format_chat_completion_stream_output(item)
        except StopIteration:
            break
        if output is not None:
            yield output


async def _async_stream_chat_completion_response(
    bytes_lines: AsyncIterable[bytes],
) -> AsyncIterable[ChatCompletionStreamOutput]:
    """Used in `AsyncInferenceClient.chat_completion`."""
    async for item in bytes_lines:
        try:
            output = _format_chat_completion_stream_output(item)
        except StopIteration:
            break
        if output is not None:
            yield output


def _format_chat_completion_stream_output(
    byte_payload: bytes,
) -> Optional[ChatCompletionStreamOutput]:
    if not byte_payload.startswith(b"data:"):
        return None  # empty line

    if byte_payload.strip() == b"data: [DONE]":
        raise StopIteration("[DONE] signal received.")

    # Decode payload
    payload = byte_payload.decode("utf-8")
    json_payload = json.loads(payload.lstrip("data:").rstrip("\n"))

    # Either an error is being returned...
    if json_payload.get("error") is not None:
        raise _parse_text_generation_error(json_payload["error"], json_payload.get("error_type"))

    # ...or we parse the token payload
    return ChatCompletionStreamOutput.parse_obj_as_instance(json_payload)


async def _async_yield_from(client: "ClientSession", response: "ClientResponse") -> AsyncIterable[bytes]:
    async for byte_payload in response.content:
        yield byte_payload.strip()
    await client.close()


# "TGI servers" are servers running with the `text-generation-inference` backend.
# This backend is the go-to solution to run large language models at scale. However,
# for some smaller models (e.g. "gpt2") the default `transformers` + `api-inference`
# solution is still in use.
#
# Both approaches have very similar APIs, but not exactly the same. What we do first in
# the `text_generation` method is to assume the model is served via TGI. If we realize
# it's not the case (i.e. we receive an HTTP 400 Bad Request), we fall back to the
# default API with a warning message. When that's the case, we remember the unsupported
# attributes for this model in the `_UNSUPPORTED_TEXT_GENERATION_KWARGS` global variable.
#
# In addition, TGI servers have a built-in API route for chat-completion, which is not
# available on the default API. We use this route to provide a more consistent behavior
# when available.
#
# For more details, see https://github.com/huggingface/text-generation-inference and
# https://huggingface.co/docs/api-inference/detailed_parameters#text-generation-task.

_UNSUPPORTED_TEXT_GENERATION_KWARGS: Dict[Optional[str], List[str]] = {}


def _set_unsupported_text_generation_kwargs(model: Optional[str], unsupported_kwargs: List[str]) -> None:
    _UNSUPPORTED_TEXT_GENERATION_KWARGS.setdefault(model, []).extend(unsupported_kwargs)


def _get_unsupported_text_generation_kwargs(model: Optional[str]) -> List[str]:
    return _UNSUPPORTED_TEXT_GENERATION_KWARGS.get(model, [])


# TEXT GENERATION ERRORS
# ----------------------
# Text-generation errors are parsed separately to handle as much as possible the errors returned by the text generation
# inference project (https://github.com/huggingface/text-generation-inference).
# ----------------------


def raise_text_generation_error(http_error: HTTPError) -> NoReturn:
    """
    Try to parse a text-generation-inference error message and raise an HTTPError in any case.

    Args:
        http_error (`HTTPError`):
            The HTTPError that has been raised.
    """
    # Try to parse a Text Generation Inference error

    try:
        # Hacky way to retrieve payload in case of aiohttp error
        payload = getattr(http_error, "response_error_payload", None) or http_error.response.json()
        error = payload.get("error")
        error_type = payload.get("error_type")
    except Exception:  # no payload
        raise http_error

    # If error_type => more information than `hf_raise_for_status`
    if error_type is not None:
        exception = _parse_text_generation_error(error, error_type)
        raise exception from http_error

    # Otherwise, fall back to the default error
    raise http_error


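# Illustrative sketch of how an error payload maps to an exception via the parser
# below (the payload values are made up for the example):
#
#     payload = {"error": "Input validation error", "error_type": "validation"}
#     _parse_text_generation_error(payload["error"], payload["error_type"])
#     # -> ValidationError("Input validation error")
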
def _parse_text_generation_error(error: Optional[str], error_type: Optional[str]) -> TextGenerationError:
    if error_type == "generation":
        return GenerationError(error)  # type: ignore
    if error_type == "incomplete_generation":
        return IncompleteGenerationError(error)  # type: ignore
    if error_type == "overloaded":
        return OverloadedError(error)  # type: ignore
    if error_type == "validation":
        return ValidationError(error)  # type: ignore
    return UnknownError(error)  # type: ignore

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (188 Bytes)

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/audio_to_audio.py
ADDED
@@ -0,0 +1,30 @@
# Inference code generated from the JSON schema spec in @huggingface/tasks.
#
# See:
# - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
# - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
from typing import Any

from .base import BaseInferenceType, dataclass_with_extra


@dataclass_with_extra
class AudioToAudioInput(BaseInferenceType):
    """Inputs for Audio to Audio inference"""

    inputs: Any
    """The input audio data"""


@dataclass_with_extra
class AudioToAudioOutputElement(BaseInferenceType):
    """Outputs of inference for the Audio To Audio task

    A generated audio file with its label.
    """

    blob: Any
    """The generated audio file."""
    content_type: str
    """The content type of the audio file."""
    label: str
    """The label of the audio file."""

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/text_generation.py
ADDED
@@ -0,0 +1,168 @@
# Inference code generated from the JSON schema spec in @huggingface/tasks.
#
# See:
# - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
# - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
from typing import Any, List, Literal, Optional

from .base import BaseInferenceType, dataclass_with_extra


TypeEnum = Literal["json", "regex"]


@dataclass_with_extra
class TextGenerationInputGrammarType(BaseInferenceType):
    type: "TypeEnum"
    value: Any
    """A string that represents a [JSON Schema](https://json-schema.org/).
    JSON Schema is a declarative language that allows annotating JSON documents
    with types and descriptions.
    """


@dataclass_with_extra
class TextGenerationInputGenerateParameters(BaseInferenceType):
    adapter_id: Optional[str] = None
    """Lora adapter id"""
    best_of: Optional[int] = None
    """Generate best_of sequences and return the one with the highest token logprobs."""
    decoder_input_details: Optional[bool] = None
    """Whether to return decoder input token logprobs and ids."""
    details: Optional[bool] = None
    """Whether to return generation details."""
    do_sample: Optional[bool] = None
    """Activate logits sampling."""
    frequency_penalty: Optional[float] = None
    """The parameter for frequency penalty. 1.0 means no penalty.
    Penalize new tokens based on their existing frequency in the text so far,
    decreasing the model's likelihood to repeat the same line verbatim.
    """
    grammar: Optional[TextGenerationInputGrammarType] = None
    max_new_tokens: Optional[int] = None
    """Maximum number of tokens to generate."""
    repetition_penalty: Optional[float] = None
    """The parameter for repetition penalty. 1.0 means no penalty.
    See [this paper](https://arxiv.org/pdf/1909.05858.pdf) for more details.
    """
    return_full_text: Optional[bool] = None
    """Whether to prepend the prompt to the generated text"""
    seed: Optional[int] = None
    """Random sampling seed."""
    stop: Optional[List[str]] = None
    """Stop generating tokens if a member of `stop` is generated."""
    temperature: Optional[float] = None
    """The value used to modulate the logits distribution."""
    top_k: Optional[int] = None
    """The number of highest probability vocabulary tokens to keep for top-k-filtering."""
    top_n_tokens: Optional[int] = None
    """The number of highest probability vocabulary tokens to keep for top-n-filtering."""
    top_p: Optional[float] = None
    """Top-p value for nucleus sampling."""
    truncate: Optional[int] = None
    """Truncate input tokens to the given size."""
    typical_p: Optional[float] = None
    """Typical Decoding mass.
    See [Typical Decoding for Natural Language Generation](https://arxiv.org/abs/2202.00666)
    for more information.
    """
    watermark: Optional[bool] = None
    """Watermarking with [A Watermark for Large Language
    Models](https://arxiv.org/abs/2301.10226).
    """


@dataclass_with_extra
class TextGenerationInput(BaseInferenceType):
    """Text Generation Input.
    Auto-generated from TGI specs.
    For more details, check out
    https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-tgi-import.ts.
    """

    inputs: str
    parameters: Optional[TextGenerationInputGenerateParameters] = None
    stream: Optional[bool] = None


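# Illustrative construction sketch for the input types above (not from the source;
# the values are made up for the example):
#
#     params = TextGenerationInputGenerateParameters(max_new_tokens=20, temperature=0.7)
#     request = TextGenerationInput(inputs="Once upon a time", parameters=params, stream=False)
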
TextGenerationOutputFinishReason = Literal["length", "eos_token", "stop_sequence"]
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
@dataclass_with_extra
|
| 92 |
+
class TextGenerationOutputPrefillToken(BaseInferenceType):
|
| 93 |
+
id: int
|
| 94 |
+
logprob: float
|
| 95 |
+
text: str
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
@dataclass_with_extra
|
| 99 |
+
class TextGenerationOutputToken(BaseInferenceType):
|
| 100 |
+
id: int
|
| 101 |
+
logprob: float
|
| 102 |
+
special: bool
|
| 103 |
+
text: str
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
@dataclass_with_extra
|
| 107 |
+
class TextGenerationOutputBestOfSequence(BaseInferenceType):
|
| 108 |
+
finish_reason: "TextGenerationOutputFinishReason"
|
| 109 |
+
generated_text: str
|
| 110 |
+
generated_tokens: int
|
| 111 |
+
prefill: List[TextGenerationOutputPrefillToken]
|
| 112 |
+
tokens: List[TextGenerationOutputToken]
|
| 113 |
+
seed: Optional[int] = None
|
| 114 |
+
top_tokens: Optional[List[List[TextGenerationOutputToken]]] = None
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
@dataclass_with_extra
|
| 118 |
+
class TextGenerationOutputDetails(BaseInferenceType):
|
| 119 |
+
finish_reason: "TextGenerationOutputFinishReason"
|
| 120 |
+
generated_tokens: int
|
| 121 |
+
prefill: List[TextGenerationOutputPrefillToken]
|
| 122 |
+
tokens: List[TextGenerationOutputToken]
|
| 123 |
+
best_of_sequences: Optional[List[TextGenerationOutputBestOfSequence]] = None
|
| 124 |
+
seed: Optional[int] = None
|
| 125 |
+
top_tokens: Optional[List[List[TextGenerationOutputToken]]] = None
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
@dataclass_with_extra
|
| 129 |
+
class TextGenerationOutput(BaseInferenceType):
|
| 130 |
+
"""Text Generation Output.
|
| 131 |
+
Auto-generated from TGI specs.
|
| 132 |
+
For more details, check out
|
| 133 |
+
https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-tgi-import.ts.
|
| 134 |
+
"""
|
| 135 |
+
|
| 136 |
+
generated_text: str
|
| 137 |
+
details: Optional[TextGenerationOutputDetails] = None
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
@dataclass_with_extra
|
| 141 |
+
class TextGenerationStreamOutputStreamDetails(BaseInferenceType):
|
| 142 |
+
finish_reason: "TextGenerationOutputFinishReason"
|
| 143 |
+
generated_tokens: int
|
| 144 |
+
input_length: int
|
| 145 |
+
seed: Optional[int] = None
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
@dataclass_with_extra
|
| 149 |
+
class TextGenerationStreamOutputToken(BaseInferenceType):
|
| 150 |
+
id: int
|
| 151 |
+
logprob: float
|
| 152 |
+
special: bool
|
| 153 |
+
text: str
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
@dataclass_with_extra
|
| 157 |
+
class TextGenerationStreamOutput(BaseInferenceType):
|
| 158 |
+
"""Text Generation Stream Output.
|
| 159 |
+
Auto-generated from TGI specs.
|
| 160 |
+
For more details, check out
|
| 161 |
+
https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-tgi-import.ts.
|
| 162 |
+
"""
|
| 163 |
+
|
| 164 |
+
index: int
|
| 165 |
+
token: TextGenerationStreamOutputToken
|
| 166 |
+
details: Optional[TextGenerationStreamOutputStreamDetails] = None
|
| 167 |
+
generated_text: Optional[str] = None
|
| 168 |
+
top_tokens: Optional[List[TextGenerationStreamOutputToken]] = None
|
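For orientation, a minimal sketch of how these generated types compose into a request; the prompt and sampling values below are arbitrary examples, not defaults:

from huggingface_hub.inference._generated.types import (
    TextGenerationInput,
    TextGenerationInputGenerateParameters,
)

# Arbitrary illustration values: the parameter fields are all optional,
# so only the knobs you care about need to be set.
params = TextGenerationInputGenerateParameters(
    max_new_tokens=64,
    temperature=0.7,
    top_p=0.9,
    stop=["\n\n"],
)
request = TextGenerationInput(
    inputs="Write a haiku about parrots.",
    parameters=params,
    stream=False,
)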
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__init__.py
ADDED
@@ -0,0 +1,125 @@
from typing import Dict, Literal

from ._common import TaskProviderHelper
from .black_forest_labs import BlackForestLabsTextToImageTask
from .fal_ai import (
    FalAIAutomaticSpeechRecognitionTask,
    FalAITextToImageTask,
    FalAITextToSpeechTask,
    FalAITextToVideoTask,
)
from .fireworks_ai import FireworksAIConversationalTask
from .hf_inference import HFInferenceBinaryInputTask, HFInferenceConversational, HFInferenceTask
from .hyperbolic import HyperbolicTextGenerationTask, HyperbolicTextToImageTask
from .nebius import NebiusConversationalTask, NebiusTextGenerationTask, NebiusTextToImageTask
from .novita import NovitaConversationalTask, NovitaTextGenerationTask
from .replicate import ReplicateTask, ReplicateTextToSpeechTask
from .sambanova import SambanovaConversationalTask
from .together import TogetherConversationalTask, TogetherTextGenerationTask, TogetherTextToImageTask


PROVIDER_T = Literal[
    "black-forest-labs",
    "fal-ai",
    "fireworks-ai",
    "hf-inference",
    "hyperbolic",
    "nebius",
    "novita",
    "replicate",
    "sambanova",
    "together",
]

PROVIDERS: Dict[PROVIDER_T, Dict[str, TaskProviderHelper]] = {
    "black-forest-labs": {
        "text-to-image": BlackForestLabsTextToImageTask(),
    },
    "fal-ai": {
        "automatic-speech-recognition": FalAIAutomaticSpeechRecognitionTask(),
        "text-to-image": FalAITextToImageTask(),
        "text-to-speech": FalAITextToSpeechTask(),
        "text-to-video": FalAITextToVideoTask(),
    },
    "fireworks-ai": {
        "conversational": FireworksAIConversationalTask(),
    },
    "hf-inference": {
        "text-to-image": HFInferenceTask("text-to-image"),
        "conversational": HFInferenceConversational(),
        "text-generation": HFInferenceTask("text-generation"),
        "text-classification": HFInferenceTask("text-classification"),
        "question-answering": HFInferenceTask("question-answering"),
        "audio-classification": HFInferenceBinaryInputTask("audio-classification"),
        "automatic-speech-recognition": HFInferenceBinaryInputTask("automatic-speech-recognition"),
        "fill-mask": HFInferenceTask("fill-mask"),
        "feature-extraction": HFInferenceTask("feature-extraction"),
        "image-classification": HFInferenceBinaryInputTask("image-classification"),
        "image-segmentation": HFInferenceBinaryInputTask("image-segmentation"),
        "document-question-answering": HFInferenceTask("document-question-answering"),
        "image-to-text": HFInferenceBinaryInputTask("image-to-text"),
        "object-detection": HFInferenceBinaryInputTask("object-detection"),
        "audio-to-audio": HFInferenceBinaryInputTask("audio-to-audio"),
        "zero-shot-image-classification": HFInferenceBinaryInputTask("zero-shot-image-classification"),
        "zero-shot-classification": HFInferenceTask("zero-shot-classification"),
        "image-to-image": HFInferenceBinaryInputTask("image-to-image"),
        "sentence-similarity": HFInferenceTask("sentence-similarity"),
        "table-question-answering": HFInferenceTask("table-question-answering"),
        "tabular-classification": HFInferenceTask("tabular-classification"),
        "text-to-speech": HFInferenceTask("text-to-speech"),
        "token-classification": HFInferenceTask("token-classification"),
        "translation": HFInferenceTask("translation"),
        "summarization": HFInferenceTask("summarization"),
        "visual-question-answering": HFInferenceBinaryInputTask("visual-question-answering"),
    },
    "hyperbolic": {
        "text-to-image": HyperbolicTextToImageTask(),
        "conversational": HyperbolicTextGenerationTask("conversational"),
        "text-generation": HyperbolicTextGenerationTask("text-generation"),
    },
    "nebius": {
        "text-to-image": NebiusTextToImageTask(),
        "conversational": NebiusConversationalTask(),
        "text-generation": NebiusTextGenerationTask(),
    },
    "novita": {
        "text-generation": NovitaTextGenerationTask(),
        "conversational": NovitaConversationalTask(),
    },
    "replicate": {
        "text-to-image": ReplicateTask("text-to-image"),
        "text-to-speech": ReplicateTextToSpeechTask(),
        "text-to-video": ReplicateTask("text-to-video"),
    },
    "sambanova": {
        "conversational": SambanovaConversationalTask(),
    },
    "together": {
        "text-to-image": TogetherTextToImageTask(),
        "conversational": TogetherConversationalTask(),
        "text-generation": TogetherTextGenerationTask(),
    },
}


def get_provider_helper(provider: PROVIDER_T, task: str) -> TaskProviderHelper:
    """Get provider helper instance by name and task.

    Args:
        provider (str): Name of the provider
        task (str): Name of the task

    Returns:
        TaskProviderHelper: Helper instance for the specified provider and task

    Raises:
        ValueError: If provider or task is not supported
    """
    if provider not in PROVIDERS:
        raise ValueError(f"Provider '{provider}' not supported. Available providers: {list(PROVIDERS.keys())}")
    if task not in PROVIDERS[provider]:
        raise ValueError(
            f"Task '{task}' not supported for provider '{provider}'. "
            f"Available tasks: {list(PROVIDERS[provider].keys())}"
        )
    return PROVIDERS[provider][task]
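For context, a short usage sketch of this registry; the provider/task pair comes from the PROVIDERS mapping above, while the model ID and token are placeholders:

from huggingface_hub.inference._providers import get_provider_helper

# Look up the helper registered for ("together", "text-generation");
# unknown providers or tasks raise ValueError listing the available options.
helper = get_provider_helper("together", "text-generation")

# The helper then assembles a full request (URL, headers, payload).
# The model ID and api_key below are placeholders, not real credentials.
request = helper.prepare_request(
    inputs="Once upon a time",
    parameters={"max_new_tokens": 32},
    headers={},
    model="meta-llama/Llama-3.1-8B-Instruct",
    api_key="hf_xxx",
)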
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (3.38 kB)

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/_common.cpython-310.pyc
ADDED
Binary file (8.93 kB)

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/black_forest_labs.cpython-310.pyc
ADDED
Binary file (2.98 kB)

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/fal_ai.cpython-310.pyc
ADDED
Binary file (4.52 kB)

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/fireworks_ai.cpython-310.pyc
ADDED
Binary file (692 Bytes)

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/hf_inference.cpython-310.pyc
ADDED
Binary file (5.54 kB)

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/hyperbolic.cpython-310.pyc
ADDED
Binary file (2.4 kB)

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/nebius.cpython-310.pyc
ADDED
Binary file (2.4 kB)

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/novita.cpython-310.pyc
ADDED
Binary file (1.36 kB)

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/replicate.cpython-310.pyc
ADDED
Binary file (2.97 kB)

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/sambanova.cpython-310.pyc
ADDED
Binary file (709 Bytes)

parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/__pycache__/together.cpython-310.pyc
ADDED
Binary file (3.07 kB)
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/_common.py
ADDED
@@ -0,0 +1,239 @@
from functools import lru_cache
from typing import Any, Dict, Optional, Union

from huggingface_hub import constants
from huggingface_hub.inference._common import RequestParameters
from huggingface_hub.utils import build_hf_headers, get_token, logging


logger = logging.get_logger(__name__)


# Dev purposes only.
# If you want to try to run inference for a new model locally before it's registered on huggingface.co
# for a given Inference Provider, you can add it to the following dictionary.
HARDCODED_MODEL_ID_MAPPING: Dict[str, Dict[str, str]] = {
    # "HF model ID" => "Model ID on Inference Provider's side"
    #
    # Example:
    # "Qwen/Qwen2.5-Coder-32B-Instruct": "Qwen2.5-Coder-32B-Instruct",
    "fal-ai": {},
    "fireworks-ai": {},
    "hf-inference": {},
    "hyperbolic": {},
    "nebius": {},
    "replicate": {},
    "sambanova": {},
    "together": {},
}


def filter_none(d: Dict[str, Any]) -> Dict[str, Any]:
    return {k: v for k, v in d.items() if v is not None}


class TaskProviderHelper:
    """Base class for task-specific provider helpers."""

    def __init__(self, provider: str, base_url: str, task: str) -> None:
        self.provider = provider
        self.task = task
        self.base_url = base_url

    def prepare_request(
        self,
        *,
        inputs: Any,
        parameters: Dict[str, Any],
        headers: Dict,
        model: Optional[str],
        api_key: Optional[str],
        extra_payload: Optional[Dict[str, Any]] = None,
    ) -> RequestParameters:
        """
        Prepare the request to be sent to the provider.

        Each step (api_key, model, headers, url, payload) can be customized in subclasses.
        """
        # api_key from user, or local token, or raise an error
        api_key = self._prepare_api_key(api_key)

        # mapped model from HF model ID
        mapped_model = self._prepare_mapped_model(model)

        # default HF headers + user headers (to customize in subclasses)
        headers = self._prepare_headers(headers, api_key)

        # routed URL if HF token, or direct URL (to customize in '_prepare_route' in subclasses)
        url = self._prepare_url(api_key, mapped_model)

        # prepare payload (to customize in subclasses)
        payload = self._prepare_payload_as_dict(inputs, parameters, mapped_model=mapped_model)
        if payload is not None:
            payload = recursive_merge(payload, extra_payload or {})

        # body data (to customize in subclasses)
        data = self._prepare_payload_as_bytes(inputs, parameters, mapped_model, extra_payload)

        # exactly one of payload and data must be set
        if payload is not None and data is not None:
            raise ValueError("Both payload and data cannot be set in the same request.")
        if payload is None and data is None:
            raise ValueError("Either payload or data must be set in the request.")
        return RequestParameters(url=url, task=self.task, model=mapped_model, json=payload, data=data, headers=headers)

    def get_response(self, response: Union[bytes, Dict]) -> Any:
        """
        Return the response in the expected format.

        Override this method in subclasses for customized response handling.
        """
        return response

    def _prepare_api_key(self, api_key: Optional[str]) -> str:
        """Return the API key to use for the request.

        Usually not overwritten in subclasses."""
        if api_key is None:
            api_key = get_token()
        if api_key is None:
            raise ValueError(
                f"You must provide an api_key to work with {self.provider} API or log in with `huggingface-cli login`."
            )
        return api_key

    def _prepare_mapped_model(self, model: Optional[str]) -> str:
        """Return the mapped model ID to use for the request.

        Usually not overwritten in subclasses."""
        if model is None:
            raise ValueError(f"Please provide an HF model ID supported by {self.provider}.")

        # hardcoded mapping for local testing
        if HARDCODED_MODEL_ID_MAPPING.get(self.provider, {}).get(model):
            return HARDCODED_MODEL_ID_MAPPING[self.provider][model]

        provider_mapping = _fetch_inference_provider_mapping(model).get(self.provider)
        if provider_mapping is None:
            raise ValueError(f"Model {model} is not supported by provider {self.provider}.")

        if provider_mapping.task != self.task:
            raise ValueError(
                f"Model {model} is not supported for task {self.task} and provider {self.provider}. "
                f"Supported task: {provider_mapping.task}."
            )

        if provider_mapping.status == "staging":
            logger.warning(
                f"Model {model} is in staging mode for provider {self.provider}. Meant for test purposes only."
            )
        return provider_mapping.provider_id

    def _prepare_headers(self, headers: Dict, api_key: str) -> Dict:
        """Return the headers to use for the request.

        Override this method in subclasses for customized headers.
        """
        return {**build_hf_headers(token=api_key), **headers}

    def _prepare_url(self, api_key: str, mapped_model: str) -> str:
        """Return the URL to use for the request.

        Usually not overwritten in subclasses."""
        base_url = self._prepare_base_url(api_key)
        route = self._prepare_route(mapped_model)
        return f"{base_url.rstrip('/')}/{route.lstrip('/')}"

    def _prepare_base_url(self, api_key: str) -> str:
        """Return the base URL to use for the request.

        Usually not overwritten in subclasses."""
        # Route to the proxy if the api_key is an HF token
        if api_key.startswith("hf_"):
            logger.info(f"Calling '{self.provider}' provider through Hugging Face router.")
            return constants.INFERENCE_PROXY_TEMPLATE.format(provider=self.provider)
        else:
            logger.info(f"Calling '{self.provider}' provider directly.")
            return self.base_url

    def _prepare_route(self, mapped_model: str) -> str:
        """Return the route to use for the request.

        Override this method in subclasses for customized routes.
        """
        return ""

    def _prepare_payload_as_dict(self, inputs: Any, parameters: Dict, mapped_model: str) -> Optional[Dict]:
        """Return the payload to use for the request, as a dict.

        Override this method in subclasses for customized payloads.
        Only one of `_prepare_payload_as_dict` and `_prepare_payload_as_bytes` should return a value.
        """
        return None

    def _prepare_payload_as_bytes(
        self, inputs: Any, parameters: Dict, mapped_model: str, extra_payload: Optional[Dict]
    ) -> Optional[bytes]:
        """Return the body to use for the request, as bytes.

        Override this method in subclasses for customized body data.
        Only one of `_prepare_payload_as_dict` and `_prepare_payload_as_bytes` should return a value.
        """
        return None


class BaseConversationalTask(TaskProviderHelper):
    """
    Base class for conversational (chat completion) tasks.
    The schema follows the OpenAI API format defined here: https://platform.openai.com/docs/api-reference/chat
    """

    def __init__(self, provider: str, base_url: str):
        super().__init__(provider=provider, base_url=base_url, task="conversational")

    def _prepare_route(self, mapped_model: str) -> str:
        return "/v1/chat/completions"

    def _prepare_payload_as_dict(self, inputs: Any, parameters: Dict, mapped_model: str) -> Optional[Dict]:
        return {"messages": inputs, **filter_none(parameters), "model": mapped_model}


class BaseTextGenerationTask(TaskProviderHelper):
    """
    Base class for text-generation (completion) tasks.
    The schema follows the OpenAI API format defined here: https://platform.openai.com/docs/api-reference/completions
    """

    def __init__(self, provider: str, base_url: str):
        super().__init__(provider=provider, base_url=base_url, task="text-generation")

    def _prepare_route(self, mapped_model: str) -> str:
        return "/v1/completions"

    def _prepare_payload_as_dict(self, inputs: Any, parameters: Dict, mapped_model: str) -> Optional[Dict]:
        return {"prompt": inputs, **filter_none(parameters), "model": mapped_model}


@lru_cache(maxsize=None)
def _fetch_inference_provider_mapping(model: str) -> Dict:
    """
    Fetch provider mappings for a model from the Hub.
    """
    from huggingface_hub.hf_api import HfApi

    info = HfApi().model_info(model, expand=["inferenceProviderMapping"])
    provider_mapping = info.inference_provider_mapping
    if provider_mapping is None:
        raise ValueError(f"No provider mapping found for model {model}")
    return provider_mapping


def recursive_merge(dict1: Dict, dict2: Dict) -> Dict:
    return {
        **dict1,
        **{
            key: recursive_merge(dict1[key], value)
            if (key in dict1 and isinstance(dict1[key], dict) and isinstance(value, dict))
            else value
            for key, value in dict2.items()
        },
    }
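To make the extension points above concrete, here is a minimal sketch: a hypothetical provider built on BaseConversationalTask (the name and URL are invented), plus the merge semantics of recursive_merge:

from huggingface_hub.inference._providers._common import BaseConversationalTask, recursive_merge

# Hypothetical provider, for illustration only. BaseConversationalTask already
# supplies the chat route and OpenAI-style payload, so a new provider only has
# to declare its name and base URL.
class ExampleConversationalTask(BaseConversationalTask):
    def __init__(self):
        super().__init__(provider="example-provider", base_url="https://api.example.com")


# recursive_merge merges nested dicts key by key, with dict2 taking precedence,
# instead of replacing whole sub-dicts as a plain {**dict1, **dict2} would:
recursive_merge(
    {"model": "m", "options": {"seed": 1, "stream": False}},
    {"options": {"seed": 42}},
)
# -> {"model": "m", "options": {"seed": 42, "stream": False}}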
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_providers/black_forest_labs.py
ADDED
@@ -0,0 +1,66 @@
import time
from typing import Any, Dict, Optional, Union

from huggingface_hub.inference._common import _as_dict
from huggingface_hub.inference._providers._common import TaskProviderHelper, filter_none
from huggingface_hub.utils import logging
from huggingface_hub.utils._http import get_session


logger = logging.get_logger(__name__)

MAX_POLLING_ATTEMPTS = 6
POLLING_INTERVAL = 1.0


class BlackForestLabsTextToImageTask(TaskProviderHelper):
    def __init__(self):
        super().__init__(provider="black-forest-labs", base_url="https://api.us1.bfl.ai/v1", task="text-to-image")

    def _prepare_headers(self, headers: Dict, api_key: str) -> Dict:
        headers = super()._prepare_headers(headers, api_key)
        if not api_key.startswith("hf_"):
            _ = headers.pop("authorization")
            headers["X-Key"] = api_key
        return headers

    def _prepare_route(self, mapped_model: str) -> str:
        return mapped_model

    def _prepare_payload_as_dict(self, inputs: Any, parameters: Dict, mapped_model: str) -> Optional[Dict]:
        parameters = filter_none(parameters)
        if "num_inference_steps" in parameters:
            parameters["steps"] = parameters.pop("num_inference_steps")
        if "guidance_scale" in parameters:
            parameters["guidance"] = parameters.pop("guidance_scale")

        return {"prompt": inputs, **parameters}

    def get_response(self, response: Union[bytes, Dict]) -> Any:
        """
        Polling mechanism for Black Forest Labs since the API is asynchronous.
        """
        url = _as_dict(response).get("polling_url")
        session = get_session()
        for _ in range(MAX_POLLING_ATTEMPTS):
            time.sleep(POLLING_INTERVAL)

            response = session.get(url, headers={"Content-Type": "application/json"})  # type: ignore
            response.raise_for_status()  # type: ignore
            response_json: Dict = response.json()  # type: ignore
            status = response_json.get("status")
            logger.info(
                f"Polling generation result from {url}. Current status: {status}. "
                f"Will retry after {POLLING_INTERVAL} seconds if not ready."
            )

            if (
                status == "Ready"
                and isinstance(response_json.get("result"), dict)
                and (sample_url := response_json["result"].get("sample"))
            ):
                image_resp = session.get(sample_url)
                image_resp.raise_for_status()
                return image_resp.content

        raise TimeoutError(f"Failed to get the image URL after {MAX_POLLING_ATTEMPTS} attempts.")
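As a final illustration of the parameter mapping above, a small sketch; the mapped model ID is hypothetical, and the call exercises an internal method purely to show the renaming:

from huggingface_hub.inference._providers.black_forest_labs import BlackForestLabsTextToImageTask

# Hypothetical mapped model ID; _prepare_payload_as_dict renames the standard
# HF parameter names to the ones the BFL API expects, and filter_none drops
# unset values before the job is submitted.
task = BlackForestLabsTextToImageTask()
payload = task._prepare_payload_as_dict(
    "a watercolor parrot",
    {"num_inference_steps": 28, "guidance_scale": 3.5, "seed": None},
    mapped_model="flux-dev",
)
# -> {"prompt": "a watercolor parrot", "steps": 28, "guidance": 3.5}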